Diffstat (limited to 'src/tools')
-rw-r--r--src/tools/build-manifest/Cargo.toml2
-rw-r--r--src/tools/build-manifest/src/main.rs5
-rw-r--r--src/tools/build_helper/Cargo.toml2
-rw-r--r--src/tools/build_helper/src/ci.rs54
-rw-r--r--src/tools/build_helper/src/git.rs8
-rw-r--r--src/tools/build_helper/src/lib.rs1
-rw-r--r--src/tools/build_helper/src/metrics.rs92
-rw-r--r--src/tools/build_helper/src/util.rs6
-rw-r--r--src/tools/cargo/.cargo/config.toml2
-rw-r--r--src/tools/cargo/.github/renovate.json538
-rw-r--r--src/tools/cargo/.github/workflows/main.yml96
-rw-r--r--src/tools/cargo/CHANGELOG.md108
-rw-r--r--src/tools/cargo/Cargo.lock701
-rw-r--r--src/tools/cargo/Cargo.toml151
-rw-r--r--src/tools/cargo/benches/benchsuite/Cargo.toml4
-rw-r--r--src/tools/cargo/benches/capture/Cargo.toml4
-rwxr-xr-xsrc/tools/cargo/ci/validate-version-bump.sh45
-rw-r--r--src/tools/cargo/crates/cargo-platform/Cargo.toml6
-rw-r--r--src/tools/cargo/crates/cargo-test-macro/Cargo.toml4
-rw-r--r--src/tools/cargo/crates/cargo-test-support/Cargo.toml4
-rw-r--r--src/tools/cargo/crates/cargo-test-support/containers/sshd/Dockerfile2
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/compare.rs1
-rw-r--r--src/tools/cargo/crates/cargo-test-support/src/registry.rs36
-rw-r--r--src/tools/cargo/crates/cargo-util/Cargo.toml8
-rw-r--r--src/tools/cargo/crates/cargo-util/src/paths.rs20
-rw-r--r--src/tools/cargo/crates/cargo-util/src/process_builder.rs4
-rw-r--r--src/tools/cargo/crates/crates-io/Cargo.toml8
-rw-r--r--src/tools/cargo/crates/crates-io/lib.rs167
-rw-r--r--src/tools/cargo/crates/home/Cargo.toml7
-rw-r--r--src/tools/cargo/crates/mdman/Cargo.toml4
-rw-r--r--src/tools/cargo/crates/resolver-tests/Cargo.toml2
-rw-r--r--src/tools/cargo/crates/semver-check/Cargo.toml2
-rw-r--r--src/tools/cargo/crates/semver-check/src/main.rs13
-rw-r--r--src/tools/cargo/crates/xtask-build-man/Cargo.toml2
-rw-r--r--src/tools/cargo/crates/xtask-bump-check/Cargo.toml14
-rw-r--r--src/tools/cargo/crates/xtask-bump-check/src/main.rs (renamed from src/tools/cargo/crates/xtask-unpublished/src/main.rs)14
-rw-r--r--src/tools/cargo/crates/xtask-bump-check/src/xtask.rs423
-rw-r--r--src/tools/cargo/crates/xtask-stale-label/Cargo.toml2
-rw-r--r--src/tools/cargo/crates/xtask-stale-label/src/main.rs2
-rw-r--r--src/tools/cargo/crates/xtask-unpublished/Cargo.toml12
-rw-r--r--src/tools/cargo/crates/xtask-unpublished/src/xtask.rs200
-rw-r--r--src/tools/cargo/credential/cargo-credential-1password/Cargo.toml6
-rw-r--r--src/tools/cargo/credential/cargo-credential-1password/src/main.rs141
-rw-r--r--src/tools/cargo/credential/cargo-credential-gnome-secret/build.rs8
-rw-r--r--src/tools/cargo/credential/cargo-credential-gnome-secret/src/libsecret.rs190
-rw-r--r--src/tools/cargo/credential/cargo-credential-gnome-secret/src/main.rs12
-rw-r--r--src/tools/cargo/credential/cargo-credential-libsecret/Cargo.toml (renamed from src/tools/cargo/credential/cargo-credential-gnome-secret/Cargo.toml)13
-rw-r--r--src/tools/cargo/credential/cargo-credential-libsecret/README.md (renamed from src/tools/cargo/credential/cargo-credential-gnome-secret/README.md)2
-rw-r--r--src/tools/cargo/credential/cargo-credential-libsecret/src/lib.rs235
-rw-r--r--src/tools/cargo/credential/cargo-credential-macos-keychain/Cargo.toml6
-rw-r--r--src/tools/cargo/credential/cargo-credential-macos-keychain/src/lib.rs81
-rw-r--r--src/tools/cargo/credential/cargo-credential-macos-keychain/src/main.rs58
-rw-r--r--src/tools/cargo/credential/cargo-credential-wincred/Cargo.toml6
-rw-r--r--src/tools/cargo/credential/cargo-credential-wincred/src/lib.rs125
-rw-r--r--src/tools/cargo/credential/cargo-credential-wincred/src/main.rs122
-rw-r--r--src/tools/cargo/credential/cargo-credential/Cargo.toml18
-rw-r--r--src/tools/cargo/credential/cargo-credential/README.md2
-rw-r--r--src/tools/cargo/credential/cargo-credential/examples/file-provider.rs90
-rw-r--r--src/tools/cargo/credential/cargo-credential/examples/stdout-redirected.rs25
-rw-r--r--src/tools/cargo/credential/cargo-credential/src/error.rs206
-rw-r--r--src/tools/cargo/credential/cargo-credential/src/lib.rs312
-rw-r--r--src/tools/cargo/credential/cargo-credential/src/secret.rs101
-rw-r--r--src/tools/cargo/credential/cargo-credential/src/stdio.rs163
-rw-r--r--src/tools/cargo/credential/cargo-credential/tests/examples.rs45
-rw-r--r--src/tools/cargo/deny.toml1
-rwxr-xr-xsrc/tools/cargo/publish.py6
-rw-r--r--src/tools/cargo/src/bin/cargo/cli.rs24
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/add.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/bench.rs45
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/build.rs15
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/check.rs12
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/clean.rs8
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/doc.rs22
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/fetch.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/fix.rs52
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/init.rs4
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/install.rs50
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/locate_project.rs6
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/login.rs37
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/logout.rs2
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/metadata.rs6
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/new.rs4
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/owner.rs4
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/package.rs17
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/pkgid.rs9
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/publish.rs19
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/remove.rs108
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/run.rs10
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/rustc.rs38
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/rustdoc.rs12
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/search.rs4
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/test.rs41
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/tree.rs26
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/uninstall.rs9
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/update.rs21
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/vendor.rs4
-rw-r--r--src/tools/cargo/src/bin/cargo/commands/yank.rs6
-rw-r--r--src/tools/cargo/src/bin/cargo/main.rs15
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/build_context/target_info.rs12
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/context/compilation_files.rs4
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/custom_build.rs12
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs27
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/future_incompat.rs2
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs4
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/mod.rs4
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/output_depinfo.rs2
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/rustdoc.rs6
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/timings.js10
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/timings.rs4
-rw-r--r--src/tools/cargo/src/cargo/core/compiler/unit_dependencies.rs2
-rw-r--r--src/tools/cargo/src/cargo/core/dependency.rs2
-rw-r--r--src/tools/cargo/src/cargo/core/package.rs8
-rw-r--r--src/tools/cargo/src/cargo/core/registry.rs6
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/conflict_cache.rs2
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/context.rs2
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/dep_cache.rs2
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/encode.rs106
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/features.rs14
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/mod.rs2
-rw-r--r--src/tools/cargo/src/cargo/core/resolver/resolve.rs2
-rw-r--r--src/tools/cargo/src/cargo/core/source/source_id.rs74
-rw-r--r--src/tools/cargo/src/cargo/core/workspace.rs2
-rw-r--r--src/tools/cargo/src/cargo/lib.rs2
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs4
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs2
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_new.rs2
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_package.rs105
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_read_manifest.rs2
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_run.rs4
-rw-r--r--src/tools/cargo/src/cargo/ops/cargo_test.rs33
-rw-r--r--src/tools/cargo/src/cargo/ops/fix.rs2
-rw-r--r--src/tools/cargo/src/cargo/ops/registry/login.rs144
-rw-r--r--src/tools/cargo/src/cargo/ops/registry/logout.rs26
-rw-r--r--src/tools/cargo/src/cargo/ops/registry/mod.rs18
-rw-r--r--src/tools/cargo/src/cargo/ops/registry/owner.rs8
-rw-r--r--src/tools/cargo/src/cargo/ops/registry/publish.rs14
-rw-r--r--src/tools/cargo/src/cargo/ops/registry/yank.rs8
-rw-r--r--src/tools/cargo/src/cargo/ops/resolve.rs2
-rw-r--r--src/tools/cargo/src/cargo/ops/tree/graph.rs2
-rw-r--r--src/tools/cargo/src/cargo/sources/config.rs2
-rw-r--r--src/tools/cargo/src/cargo/sources/git/known_hosts.rs20
-rw-r--r--src/tools/cargo/src/cargo/sources/git/oxide.rs2
-rw-r--r--src/tools/cargo/src/cargo/sources/git/source.rs7
-rw-r--r--src/tools/cargo/src/cargo/sources/git/utils.rs10
-rw-r--r--src/tools/cargo/src/cargo/sources/path.rs6
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/download.rs9
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/http_remote.rs44
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/index.rs36
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/mod.rs9
-rw-r--r--src/tools/cargo/src/cargo/sources/registry/remote.rs2
-rw-r--r--src/tools/cargo/src/cargo/util/auth/asymmetric.rs155
-rw-r--r--src/tools/cargo/src/cargo/util/auth/mod.rs872
-rw-r--r--src/tools/cargo/src/cargo/util/command_prelude.rs146
-rw-r--r--src/tools/cargo/src/cargo/util/config/mod.rs35
-rw-r--r--src/tools/cargo/src/cargo/util/config/path.rs25
-rw-r--r--src/tools/cargo/src/cargo/util/config/target.rs2
-rw-r--r--src/tools/cargo/src/cargo/util/credential/adaptor.rs68
-rw-r--r--src/tools/cargo/src/cargo/util/credential/mod.rs8
-rw-r--r--src/tools/cargo/src/cargo/util/credential/paseto.rs224
-rw-r--r--src/tools/cargo/src/cargo/util/credential/process.rs80
-rw-r--r--src/tools/cargo/src/cargo/util/credential/token.rs96
-rw-r--r--src/tools/cargo/src/cargo/util/diagnostic_server.rs2
-rw-r--r--src/tools/cargo/src/cargo/util/errors.rs17
-rw-r--r--src/tools/cargo/src/cargo/util/job.rs2
-rw-r--r--src/tools/cargo/src/cargo/util/mod.rs1
-rw-r--r--src/tools/cargo/src/cargo/util/network/http.rs39
-rw-r--r--src/tools/cargo/src/cargo/util/network/mod.rs2
-rw-r--r--src/tools/cargo/src/cargo/util/network/sleep.rs2
-rw-r--r--src/tools/cargo/src/cargo/util/restricted_names.rs15
-rw-r--r--src/tools/cargo/src/cargo/util/rustc.rs2
-rw-r--r--src/tools/cargo/src/cargo/util/toml/embedded.rs37
-rw-r--r--src/tools/cargo/src/cargo/util/toml/mod.rs10
-rw-r--r--src/tools/cargo/src/cargo/util/toml_mut/dependency.rs6
-rw-r--r--src/tools/cargo/src/doc/contrib/src/implementation/debugging.md15
-rw-r--r--src/tools/cargo/src/doc/contrib/src/process/release.md37
-rw-r--r--src/tools/cargo/src/doc/man/cargo-metadata.md44
-rw-r--r--src/tools/cargo/src/doc/man/cargo-test.md1
-rw-r--r--src/tools/cargo/src/doc/man/cargo-yank.md2
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-metadata.txt45
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-test.txt5
-rw-r--r--src/tools/cargo/src/doc/man/generated_txt/cargo-yank.txt2
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-metadata.md44
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-test.md6
-rw-r--r--src/tools/cargo/src/doc/src/commands/cargo-yank.md2
-rw-r--r--src/tools/cargo/src/doc/src/guide/cargo-toml-vs-cargo-lock.md2
-rw-r--r--src/tools/cargo/src/doc/src/reference/build-scripts.md53
-rw-r--r--src/tools/cargo/src/doc/src/reference/config.md14
-rw-r--r--src/tools/cargo/src/doc/src/reference/environment-variables.md6
-rw-r--r--src/tools/cargo/src/doc/src/reference/external-tools.md3
-rw-r--r--src/tools/cargo/src/doc/src/reference/manifest.md22
-rw-r--r--src/tools/cargo/src/doc/src/reference/overriding-dependencies.md6
-rw-r--r--src/tools/cargo/src/doc/src/reference/profiles.md15
-rw-r--r--src/tools/cargo/src/doc/src/reference/resolver.md7
-rw-r--r--src/tools/cargo/src/doc/src/reference/semver.md858
-rw-r--r--src/tools/cargo/src/doc/src/reference/specifying-dependencies.md2
-rw-r--r--src/tools/cargo/src/doc/src/reference/unstable.md328
-rw-r--r--src/tools/cargo/src/doc/src/reference/workspaces.md23
-rw-r--r--src/tools/cargo/src/etc/_cargo12
-rw-r--r--src/tools/cargo/src/etc/cargo.bashcomp.sh7
-rw-r--r--src/tools/cargo/src/etc/man/cargo-metadata.149
-rw-r--r--src/tools/cargo/src/etc/man/cargo-test.17
-rw-r--r--src/tools/cargo/src/etc/man/cargo-yank.12
-rw-r--r--src/tools/cargo/tests/testsuite/alt_registry.rs41
-rw-r--r--src/tools/cargo/tests/testsuite/bench.rs61
-rw-r--r--src/tools/cargo/tests/testsuite/build_script.rs29
-rw-r--r--src/tools/cargo/tests/testsuite/build_script_env.rs10
-rw-r--r--src/tools/cargo/tests/testsuite/cargo/help/mod.rs12
-rw-r--r--src/tools/cargo/tests/testsuite/cargo/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/auto_git/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo/help/stdout.log39
-rw-r--r--src/tools/cargo/tests/testsuite/cargo/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/help/stdout.log124
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_add/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_bench/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_bench/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_bench/help/stdout.log59
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_bench/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_build/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_build/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_build/help/stdout.log58
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_build/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_check/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_check/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_check/help/stdout.log56
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_check/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_clean/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_clean/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_clean/help/stdout.log29
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_clean/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_config/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_config/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_config/help/stdout.log18
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_config/mod.rs (renamed from src/tools/cargo/tests/testsuite/cargo_config.rs)2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_doc/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_doc/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_doc/help/stdout.log53
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_doc/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_fetch/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_fetch/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_fetch/help/stdout.log22
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_fetch/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_fix/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_fix/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_fix/help/stdout.log60
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_fix/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/stdout.log19
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_generate_lockfile/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_git_checkout/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_git_checkout/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_git_checkout/help/stdout.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_git_checkout/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_help/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_help/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_help/help/stdout.log18
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_help/mod.rs1
l---------src/tools/cargo/tests/testsuite/cargo_init/auto_git/in (renamed from src/tools/cargo/tests/testsuite/init/auto_git/in)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/auto_git/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/auto_git/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/auto_git/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/auto_git/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/auto_git/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/auto_git/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/auto_git/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/auto_git/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/auto_git/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/in/src/main.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/in/src/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/out/src/main.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/src/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/formats_source/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/in/main.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/in/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/out/main.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/fossil_autodetect/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/in/src/main.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/in/src/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/out/src/main.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/src/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/git_autodetect/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/in/case.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/in/case.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/out/case.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/case.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/in/src/case.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/in/src/case.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/out/src/case.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/src/case.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/in/main.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/in/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/out/main.rs (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/both_lib_and_bin/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/both_lib_and_bin/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/both_lib_and_bin/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/both_lib_and_bin/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/in/case.rs (renamed from src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/case.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/in/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/in/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/in/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/out/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/invalid_dir_name/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/in/case.rs (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/case.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/in/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/out/case.rs (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/case.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/out/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/in/case.rs (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/in/case.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/out/case.rs (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/case.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/in/case.rs (renamed from src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/in/case.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/out/case.rs (renamed from src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/case.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/empty_dir/.keep (renamed from src/tools/cargo/tests/testsuite/init/empty_dir/.keep)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/empty_dir/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/empty_dir/mod.rs)0
l---------src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/in (renamed from src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/in)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/out/src/main.rs (renamed from src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/src/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/formats_source/in/rustfmt.toml (renamed from src/tools/cargo/tests/testsuite/init/formats_source/in/rustfmt.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/formats_source/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/formats_source/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/formats_source/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/formats_source/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/formats_source/out/rustfmt.toml (renamed from src/tools/cargo/tests/testsuite/init/formats_source/out/rustfmt.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/formats_source/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/formats_source/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/formats_source/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/formats_source/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/formats_source/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/no_filename/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/in/.fossil/.keep (renamed from src/tools/cargo/tests/testsuite/init/fossil_autodetect/in/.fossil/.keep)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/fossil_autodetect/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/out/.fossil-settings/clean-glob (renamed from src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/clean-glob)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/out/.fossil-settings/ignore-glob (renamed from src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/ignore-glob)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/fossil_autodetect/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/path_contains_separator/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/git_autodetect/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/git_autodetect/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/git_autodetect/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/git_autodetect/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/pijul_autodetect/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/reserved_name/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/help/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/in/README.md)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/help/stdout.log31
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/in/rustfmt.toml (renamed from src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/in/rustfmt.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/out/rustfmt.toml (renamed from src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/rustfmt.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/simple_bin/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/in/main.rs (renamed from src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/in/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/out/main.rs (renamed from src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/simple_git/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/in/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/in/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/out/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/in/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/in/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/in/README.md (renamed from src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/in/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/in/crates/foo/src/main.rs (renamed from src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/in/crates/foo/src/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/in/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/in/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/out/crates/foo/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/out/crates/foo/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/out/crates/foo/src/main.rs (renamed from src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/out/crates/foo/src/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/simple_hg/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/invalid_dir_name/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/invalid_dir_name/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/invalid_dir_name/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/invalid_dir_name/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/invalid_dir_name/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/in/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/out/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/path_contains_separator/in/.keep)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/simple_lib/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/in/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/lib_already_exists_src/in/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/lib_already_exists_src/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/unknown_flags/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/mercurial_autodetect/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/with_argument/stdout.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/mod.rs)1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/in/case.rs (renamed from src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/case.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/in/main.rs (renamed from src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/out/case.rs (renamed from src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/case.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/out/main.rs (renamed from src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/pijul_autodetect/in/.pijul/.keep)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/no_filename/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/no_filename/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/no_filename/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/no_filename/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/no_filename/stdout.log (renamed from src/tools/cargo/tests/testsuite/init/with_argument/in/foo/.keep)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/in/.keep0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/path_contains_separator/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/path_contains_separator/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/out/src/main.rs (renamed from src/tools/cargo/tests/testsuite/init/path_contains_separator/out/src/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/path_contains_separator/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/in/.pijul/.keep0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/pijul_autodetect/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/out/.ignore (renamed from src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/.ignore)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/pijul_autodetect/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/reserved_name/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/reserved_name/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/reserved_name/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/reserved_name/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/reserved_name/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_init/simple_bin/in (renamed from src/tools/cargo/tests/testsuite/init/simple_bin/in)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_bin/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/simple_bin/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_bin/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/simple_bin/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_bin/out/src/main.rs (renamed from src/tools/cargo/tests/testsuite/init/simple_bin/out/src/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_bin/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/simple_bin/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_bin/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_init/simple_git/in (renamed from src/tools/cargo/tests/testsuite/init/simple_git/in)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_git/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/simple_git/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_git/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/simple_git/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_git/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/simple_git/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_git/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/simple_git/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_git/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_init/simple_hg/in (renamed from src/tools/cargo/tests/testsuite/init/simple_hg/in)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_hg/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/simple_hg/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_hg/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/simple_hg/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_hg/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/simple_hg/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_hg/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/simple_hg/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_hg/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/stdout.log0
l---------src/tools/cargo/tests/testsuite/cargo_init/simple_lib/in (renamed from src/tools/cargo/tests/testsuite/init/simple_lib/in)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_lib/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/simple_lib/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_lib/out/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/simple_lib/out/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_lib/out/src/lib.rs (renamed from src/tools/cargo/tests/testsuite/init/simple_lib/out/src/lib.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_lib/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/simple_lib/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/simple_lib/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/unknown_flags/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/unknown_flags/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/unknown_flags/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/unknown_flags/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/unknown_flags/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/with_argument/in/foo/.keep0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/with_argument/mod.rs (renamed from src/tools/cargo/tests/testsuite/init/with_argument/mod.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/with_argument/out/foo/Cargo.toml (renamed from src/tools/cargo/tests/testsuite/init/with_argument/out/foo/Cargo.toml)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/with_argument/out/foo/src/main.rs (renamed from src/tools/cargo/tests/testsuite/init/with_argument/out/foo/src/main.rs)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/with_argument/stderr.log (renamed from src/tools/cargo/tests/testsuite/init/with_argument/stderr.log)0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_init/with_argument/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_install/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_install/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_install/help/stdout.log56
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_install/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_locate_project/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_locate_project/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_locate_project/help/stdout.log22
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_locate_project/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_login/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_login/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_login/help/stdout.log23
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_login/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_logout/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_logout/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_logout/help/stdout.log19
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_logout/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_metadata/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_metadata/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_metadata/help/stdout.log30
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_metadata/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_new/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_new/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_new/help/stdout.log31
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_new/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_owner/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_owner/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_owner/help/stdout.log27
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_owner/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_package/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_package/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_package/help/stdout.log39
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_package/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_pkgid/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_pkgid/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_pkgid/help/stdout.log25
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_pkgid/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_publish/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_publish/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_publish/help/stdout.log39
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_publish/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_read_manifest/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_read_manifest/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_read_manifest/help/stdout.log19
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_read_manifest/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/build/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/dev/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde/Cargo.toml9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde_derive/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde_derive/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/mod.rs27
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde/Cargo.toml6
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde_derive/Cargo.toml8
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde_derive/src/lib.rs0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/stdout.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/mod.rs9
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/out/Cargo.lock19
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/stderr.log2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/help/stdout.log29
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/mod.rs2
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/package/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target_build/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/target_dev/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/stderr.log1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_report/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_report/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_report/help/stdout.log20
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_report/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_run/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_run/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_run/help/stdout.log47
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_run/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_rustc/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_rustc/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_rustc/help/stdout.log58
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_rustc/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_rustdoc/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_rustdoc/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_rustdoc/help/stdout.log56
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_rustdoc/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_search/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_search/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_search/help/stdout.log24
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_search/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_test/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_test/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_test/help/stdout.log63
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_test/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_tree/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_tree/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_tree/help/stdout.log45
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_tree/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_uninstall/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_uninstall/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_uninstall/help/stdout.log28
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_uninstall/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_update/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_update/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_update/help/stdout.log26
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_update/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_vendor/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_vendor/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_vendor/help/stdout.log27
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_vendor/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_verify_project/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_verify_project/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_verify_project/help/stdout.log19
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_verify_project/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_version/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_version/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_version/help/stdout.log18
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_version/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_yank/help/mod.rs13
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_yank/help/stderr.log0
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_yank/help/stdout.log26
-rw-r--r--src/tools/cargo/tests/testsuite/cargo_yank/mod.rs1
-rw-r--r--src/tools/cargo/tests/testsuite/credential_process.rs640
-rw-r--r--src/tools/cargo/tests/testsuite/freshness.rs62
-rw-r--r--src/tools/cargo/tests/testsuite/lints.rs55
-rw-r--r--src/tools/cargo/tests/testsuite/lockfile_compat.rs195
-rw-r--r--src/tools/cargo/tests/testsuite/login.rs188
-rw-r--r--src/tools/cargo/tests/testsuite/main.rs36
-rw-r--r--src/tools/cargo/tests/testsuite/owner.rs8
-rw-r--r--src/tools/cargo/tests/testsuite/package.rs112
-rw-r--r--src/tools/cargo/tests/testsuite/profile_targets.rs8
-rw-r--r--src/tools/cargo/tests/testsuite/publish.rs10
-rw-r--r--src/tools/cargo/tests/testsuite/registry.rs35
-rw-r--r--src/tools/cargo/tests/testsuite/registry_auth.rs96
-rw-r--r--src/tools/cargo/tests/testsuite/run.rs22
-rw-r--r--src/tools/cargo/tests/testsuite/script.rs158
-rw-r--r--src/tools/cargo/tests/testsuite/test.rs71
-rw-r--r--src/tools/cargo/tests/testsuite/update.rs40
-rw-r--r--src/tools/cargo/tests/testsuite/yank.rs8
-rw-r--r--src/tools/clippy/.github/workflows/clippy.yml2
-rw-r--r--src/tools/clippy/.github/workflows/clippy_bors.yml31
-rw-r--r--src/tools/clippy/CHANGELOG.md197
-rw-r--r--src/tools/clippy/Cargo.toml13
-rw-r--r--src/tools/clippy/README.md2
-rw-r--r--src/tools/clippy/book/src/README.md2
-rw-r--r--src/tools/clippy/book/src/development/infrastructure/changelog_update.md22
-rw-r--r--src/tools/clippy/book/src/development/speedtest.md24
-rw-r--r--src/tools/clippy/book/src/lint_configuration.md23
-rw-r--r--src/tools/clippy/clippy_dev/src/lib.rs2
-rw-r--r--src/tools/clippy/clippy_dev/src/main.rs2
-rw-r--r--src/tools/clippy/clippy_dev/src/new_lint.rs26
-rw-r--r--src/tools/clippy/clippy_dev/src/setup/intellij.rs2
-rw-r--r--src/tools/clippy/clippy_dev/src/setup/vscode.rs2
-rw-r--r--src/tools/clippy/clippy_dev/src/update_lints.rs5
-rw-r--r--src/tools/clippy/clippy_lints/Cargo.toml2
-rw-r--r--src/tools/clippy/clippy_lints/src/absolute_paths.rs100
-rw-r--r--src/tools/clippy/clippy_lints/src/allow_attributes.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs15
-rw-r--r--src/tools/clippy/clippy_lints/src/assertions_on_constants.rs12
-rw-r--r--src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/attrs.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs14
-rw-r--r--src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/box_default.rs14
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/mod.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs64
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs26
-rw-r--r--src/tools/clippy/clippy_lints/src/copies.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/copy_iterator.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/dbg_macro.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/declared_lints.rs23
-rw-r--r--src/tools/clippy/clippy_lints/src/default.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs20
-rw-r--r--src/tools/clippy/clippy_lints/src/dereference.rs1070
-rw-r--r--src/tools/clippy/clippy_lints/src/derivable_impls.rs26
-rw-r--r--src/tools/clippy/clippy_lints/src/derive.rs38
-rw-r--r--src/tools/clippy/clippy_lints/src/disallowed_methods.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/disallowed_names.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/doc.rs33
-rw-r--r--src/tools/clippy/clippy_lints/src/drop_forget_ref.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/empty_drop.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/empty_enum.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/endian_bytes.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/entry.rs33
-rw-r--r--src/tools/clippy/clippy_lints/src/enum_clike.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/error_impl_error.rs87
-rw-r--r--src/tools/clippy/clippy_lints/src/escape.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/eta_reduction.rs337
-rw-r--r--src/tools/clippy/clippy_lints/src/excessive_nesting.rs11
-rw-r--r--src/tools/clippy/clippy_lints/src/explicit_write.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs9
-rw-r--r--src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/fn_null_check.rs102
-rw-r--r--src/tools/clippy/clippy_lints/src/format.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/format_args.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/format_impl.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/four_forward_slashes.rs99
-rw-r--r--src/tools/clippy/clippy_lints/src/from_over_into.rs16
-rw-r--r--src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs19
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/must_use.rs17
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/result.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/functions/too_many_lines.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/future_not_send.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/if_let_mutex.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/ignored_unit_patterns.rs52
-rw-r--r--src/tools/clippy/clippy_lints/src/implicit_return.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/incorrect_impls.rs213
-rw-r--r--src/tools/clippy/clippy_lints/src/index_refutable_slice.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/indexing_slicing.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/inherent_impl.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/inherent_to_string.rs35
-rw-r--r--src/tools/clippy/clippy_lints/src/init_numbered_fields.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/instant_subtraction.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/items_after_test_module.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs9
-rw-r--r--src/tools/clippy/clippy_lints/src/large_const_arrays.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/large_enum_variant.rs12
-rw-r--r--src/tools/clippy/clippy_lints/src/large_futures.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/large_include_file.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/large_stack_frames.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/len_zero.rs50
-rw-r--r--src/tools/clippy/clippy_lints/src/let_if_seq.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/let_underscore.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/lib.rs41
-rw-r--r--src/tools/clippy/clippy_lints/src/lifetimes.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/literal_representation.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/manual_find.rs14
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/manual_while_let_some.rs9
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/mod.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/never_loop.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/utils.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs16
-rw-r--r--src/tools/clippy/clippy_lints/src/macro_use.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_bits.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_clamp.rs14
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_float_methods.rs175
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_let_else.rs49
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_range_patterns.rs31
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/manual_strip.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/map_unit_fn.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/match_result_ok.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/manual_filter.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/manual_map.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/manual_utils.rs9
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/mod.rs39
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/redundant_guards.rs196
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs152
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/single_match.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/matches/try_err.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/bytecount.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/collapsible_str_replace.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/drain_collect.rs9
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/err_expect.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/expect_used.rs44
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filter_map.rs324
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs53
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filter_map_next.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/filter_next.rs51
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/flat_map_identity.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/flat_map_option.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/format_collect.rs33
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/get_first.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/get_unwrap.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/inspect_for_each.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/is_digit_ascii_radix.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_skip_zero.rs34
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs18
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_clone.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_err_ignore.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_flatten.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_identity.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/mod.rs351
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/needless_collect.rs19
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/needless_option_as_deref.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/ok_expect.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/open_options.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs9
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs124
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/or_then_unwrap.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/read_line_without_trim.rs74
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/readonly_write_lock.rs52
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/seek_from_current.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/stable_sort_primitive.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/str_splitn.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/string_extend_chars.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/string_lit_chars_any.rs58
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/suspicious_command_arg_space.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/type_id_on_box.rs62
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs13
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_literal_unwrap.rs24
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs54
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unwrap_expect_used.rs83
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unwrap_or_else_default.rs66
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/unwrap_used.rs53
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/useless_asref.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/utils.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/min_ident_chars.rs21
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_assert_message.rs12
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs20
-rw-r--r--src/tools/clippy/clippy_lints/src/missing_inline.rs1
-rw-r--r--src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/module_style.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs12
-rw-r--r--src/tools/clippy/clippy_lints/src/mut_key.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/mut_reference.rs9
-rw-r--r--src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_bool.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_else.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_for_each.rs11
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_if.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_late_init.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs441
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs24
-rw-r--r--src/tools/clippy/clippy_lints/src/needless_question_mark.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/new_without_default.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/no_effect.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/non_copy_const.rs75
-rw-r--r--src/tools/clippy/clippy_lints/src/non_expressive_names.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs14
-rw-r--r--src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs19
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs36
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/bit_mask.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/const_comparisons.rs207
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/eq_op.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/mod.rs43
-rw-r--r--src/tools/clippy/clippy_lints/src/operators/op_ref.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/option_env_unwrap.rs39
-rw-r--r--src/tools/clippy/clippy_lints/src/option_if_let_else.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/panic_unimplemented.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/partialeq_to_none.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/ptr.rs41
-rw-r--r--src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/question_mark.rs72
-rw-r--r--src/tools/clippy/clippy_lints/src/question_mark_used.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/ranges.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/raw_strings.rs14
-rw-r--r--src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs15
-rw-r--r--src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_async_block.rs13
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_closure_call.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_locals.rs126
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_slicing.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/reference.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/regex.rs66
-rw-r--r--src/tools/clippy/clippy_lints/src/renamed_lints.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/returns.rs90
-rw-r--r--src/tools/clippy/clippy_lints/src/self_named_constructors.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/semicolon_block.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/shadow.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs65
-rw-r--r--src/tools/clippy/clippy_lints/src/single_call_fn.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/single_component_path_imports.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/size_of_ref.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs133
-rw-r--r--src/tools/clippy/clippy_lints/src/std_instead_of_core.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/strings.rs12
-rw-r--r--src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs49
-rw-r--r--src/tools/clippy/clippy_lints/src/swap.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/temporary_assignment.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs9
-rw-r--r--src/tools/clippy/clippy_lints/src/to_digit_is_some.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_non_zero.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs9
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/unsound_collection_transmute.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/transmute/utils.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs324
-rw-r--r--src/tools/clippy/clippy_lints/src/types/borrowed_box.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/types/box_collection.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/types/linked_list.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/types/mod.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/types/option_option.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/types/rc_buffer.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/types/rc_mutex.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/types/redundant_allocation.rs9
-rw-r--r--src/tools/clippy/clippy_lints/src/types/vec_box.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs11
-rw-r--r--src/tools/clippy/clippy_lints/src/uninit_vec.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs8
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/unit_types/unit_cmp.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/unnamed_address.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs26
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs5
-rw-r--r--src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs7
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_async.rs108
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_io_amount.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/unused_unit.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/unwrap.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/use_self.rs21
-rw-r--r--src/tools/clippy/clippy_lints/src/useless_conversion.rs15
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/author.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/conf.rs12
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/format_args_collector.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/if_chain_style.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs13
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs3
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs19
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/msrv_attr_impl.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/utils/mod.rs2
-rw-r--r--src/tools/clippy/clippy_lints/src/vec.rs10
-rw-r--r--src/tools/clippy/clippy_lints/src/vec_init_then_push.rs4
-rw-r--r--src/tools/clippy/clippy_lints/src/visibility.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/wildcard_imports.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/write.rs22
-rw-r--r--src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs4
-rw-r--r--src/tools/clippy/clippy_test_deps/Cargo.toml23
-rw-r--r--src/tools/clippy/clippy_test_deps/src/lib.rs14
-rw-r--r--src/tools/clippy/clippy_utils/Cargo.toml2
-rw-r--r--src/tools/clippy/clippy_utils/src/ast_utils.rs12
-rw-r--r--src/tools/clippy/clippy_utils/src/attrs.rs7
-rw-r--r--src/tools/clippy/clippy_utils/src/check_proc_macro.rs24
-rw-r--r--src/tools/clippy/clippy_utils/src/comparisons.rs4
-rw-r--r--src/tools/clippy/clippy_utils/src/consts.rs32
-rw-r--r--src/tools/clippy/clippy_utils/src/eager_or_lazy.rs39
-rw-r--r--src/tools/clippy/clippy_utils/src/higher.rs9
-rw-r--r--src/tools/clippy/clippy_utils/src/hir_utils.rs32
-rw-r--r--src/tools/clippy/clippy_utils/src/lib.rs420
-rw-r--r--src/tools/clippy/clippy_utils/src/macros.rs20
-rw-r--r--src/tools/clippy/clippy_utils/src/mir/possible_borrower.rs12
-rw-r--r--src/tools/clippy/clippy_utils/src/mir/possible_origin.rs2
-rw-r--r--src/tools/clippy/clippy_utils/src/paths.rs16
-rw-r--r--src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs66
-rw-r--r--src/tools/clippy/clippy_utils/src/source.rs3
-rw-r--r--src/tools/clippy/clippy_utils/src/sugg.rs8
-rw-r--r--src/tools/clippy/clippy_utils/src/ty.rs282
-rw-r--r--src/tools/clippy/clippy_utils/src/ty/type_certainty/certainty.rs122
-rw-r--r--src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs320
-rw-r--r--src/tools/clippy/clippy_utils/src/usage.rs55
-rw-r--r--src/tools/clippy/clippy_utils/src/visitors.rs16
-rw-r--r--src/tools/clippy/declare_clippy_lint/Cargo.toml2
-rw-r--r--src/tools/clippy/lintcheck/src/config.rs3
-rw-r--r--src/tools/clippy/lintcheck/src/main.rs4
-rw-r--r--src/tools/clippy/lintcheck/src/recursive.rs3
-rw-r--r--src/tools/clippy/rust-toolchain2
-rw-r--r--src/tools/clippy/rustfmt.toml1
-rw-r--r--src/tools/clippy/src/driver.rs9
-rw-r--r--src/tools/clippy/src/main.rs3
-rw-r--r--src/tools/clippy/tests/compile-test.rs199
-rw-r--r--src/tools/clippy/tests/integration.rs24
-rw-r--r--src/tools/clippy/tests/lint_message_convention.rs6
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_empty/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_false/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/feature_name/fail/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/feature_name/pass/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/module_style/fail_mod_remap/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_crate_versions/5041_allow_dev_build/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/multiple_crate_versions/pass/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-cargo/wildcard_dependencies/pass/src/main.rs1
-rw-r--r--src/tools/clippy/tests/ui-internal/custom_ice_message.stderr5
-rw-r--r--src/tools/clippy/tests/ui-toml/absolute_paths/absolute_paths.allow_crates.stderr28
-rw-r--r--src/tools/clippy/tests/ui-toml/absolute_paths/absolute_paths.disallow_crates.stderr70
-rw-r--r--src/tools/clippy/tests/ui-toml/absolute_paths/absolute_paths.rs97
-rw-r--r--src/tools/clippy/tests/ui-toml/absolute_paths/allow_crates/clippy.toml2
-rw-r--r--src/tools/clippy/tests/ui-toml/absolute_paths/auxiliary/helper.rs11
-rw-r--r--src/tools/clippy/tests/ui-toml/absolute_paths/disallow_crates/clippy.toml1
-rw-r--r--src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr2
-rw-r--r--src/tools/clippy/tests/ui-toml/excessive_nesting/auxiliary/proc_macros.rs11
-rw-r--r--src/tools/clippy/tests/ui-toml/expect_used/expect_used.stderr4
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.rs1
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.stderr4
-rw-r--r--src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr4
-rw-r--r--src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.rs4
-rw-r--r--src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.stderr125
-rw-r--r--src/tools/clippy/tests/ui/arc_with_non_send_sync.rs30
-rw-r--r--src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr6
-rw-r--r--src/tools/clippy/tests/ui/arithmetic_side_effects.rs12
-rw-r--r--src/tools/clippy/tests/ui/as_conversions.rs3
-rw-r--r--src/tools/clippy/tests/ui/as_conversions.stderr6
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs3
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/proc_macro_attr.rs4
-rw-r--r--src/tools/clippy/tests/ui/auxiliary/proc_macros.rs11
-rw-r--r--src/tools/clippy/tests/ui/bind_instead_of_map.stderr2
-rw-r--r--src/tools/clippy/tests/ui/bind_instead_of_map_multipart.stderr10
-rw-r--r--src/tools/clippy/tests/ui/bool_comparison.fixed1
-rw-r--r--src/tools/clippy/tests/ui/bool_comparison.rs1
-rw-r--r--src/tools/clippy/tests/ui/bool_comparison.stderr44
-rw-r--r--src/tools/clippy/tests/ui/borrow_box.rs6
-rw-r--r--src/tools/clippy/tests/ui/borrow_box.stderr20
-rw-r--r--src/tools/clippy/tests/ui/comparison_to_empty.fixed12
-rw-r--r--src/tools/clippy/tests/ui/comparison_to_empty.rs12
-rw-r--r--src/tools/clippy/tests/ui/comparison_to_empty.stderr40
-rw-r--r--src/tools/clippy/tests/ui/const_comparisons.rs93
-rw-r--r--src/tools/clippy/tests/ui/const_comparisons.stderr228
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-6256.rs2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-7169.stderr2
-rw-r--r--src/tools/clippy/tests/ui/crashes/ice-8250.stderr2
-rw-r--r--src/tools/clippy/tests/ui/default_trait_access.fixed3
-rw-r--r--src/tools/clippy/tests/ui/default_trait_access.rs3
-rw-r--r--src/tools/clippy/tests/ui/default_trait_access.stderr16
-rw-r--r--src/tools/clippy/tests/ui/deref_addrof.stderr20
-rw-r--r--src/tools/clippy/tests/ui/deref_addrof_double_trigger.stderr6
-rw-r--r--src/tools/clippy/tests/ui/derive.rs6
-rw-r--r--src/tools/clippy/tests/ui/derive.stderr20
-rw-r--r--src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.rs1
-rw-r--r--src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.stderr16
-rw-r--r--src/tools/clippy/tests/ui/entry.stderr20
-rw-r--r--src/tools/clippy/tests/ui/entry_btree.stderr2
-rw-r--r--src/tools/clippy/tests/ui/entry_with_else.stderr14
-rw-r--r--src/tools/clippy/tests/ui/error_impl_error.rs90
-rw-r--r--src/tools/clippy/tests/ui/error_impl_error.stderr45
-rw-r--r--src/tools/clippy/tests/ui/eta.fixed57
-rw-r--r--src/tools/clippy/tests/ui/eta.rs57
-rw-r--r--src/tools/clippy/tests/ui/eta.stderr8
-rw-r--r--src/tools/clippy/tests/ui/expect.stderr6
-rw-r--r--src/tools/clippy/tests/ui/expect_fun_call.stderr30
-rw-r--r--src/tools/clippy/tests/ui/explicit_auto_deref.stderr78
-rw-r--r--src/tools/clippy/tests/ui/explicit_deref_methods.fixed1
-rw-r--r--src/tools/clippy/tests/ui/explicit_deref_methods.rs1
-rw-r--r--src/tools/clippy/tests/ui/explicit_deref_methods.stderr48
-rw-r--r--src/tools/clippy/tests/ui/explicit_write.stderr26
-rw-r--r--src/tools/clippy/tests/ui/extend_with_drain.stderr8
-rw-r--r--src/tools/clippy/tests/ui/filter_map_bool_then.fixed58
-rw-r--r--src/tools/clippy/tests/ui/filter_map_bool_then.rs58
-rw-r--r--src/tools/clippy/tests/ui/filter_map_bool_then.stderr40
-rw-r--r--src/tools/clippy/tests/ui/filter_map_next_fixable.stderr4
-rw-r--r--src/tools/clippy/tests/ui/fn_null_check.rs22
-rw-r--r--src/tools/clippy/tests/ui/fn_null_check.stderr43
-rw-r--r--src/tools/clippy/tests/ui/format_collect.rs31
-rw-r--r--src/tools/clippy/tests/ui/format_collect.stderr62
-rw-r--r--src/tools/clippy/tests/ui/four_forward_slashes.fixed48
-rw-r--r--src/tools/clippy/tests/ui/four_forward_slashes.rs48
-rw-r--r--src/tools/clippy/tests/ui/four_forward_slashes.stderr68
-rw-r--r--src/tools/clippy/tests/ui/four_forward_slashes_first_line.fixed7
-rw-r--r--src/tools/clippy/tests/ui/four_forward_slashes_first_line.rs7
-rw-r--r--src/tools/clippy/tests/ui/four_forward_slashes_first_line.stderr15
-rw-r--r--src/tools/clippy/tests/ui/get_first.fixed4
-rw-r--r--src/tools/clippy/tests/ui/get_first.rs4
-rw-r--r--src/tools/clippy/tests/ui/get_first.stderr6
-rw-r--r--src/tools/clippy/tests/ui/get_unwrap.fixed4
-rw-r--r--src/tools/clippy/tests/ui/get_unwrap.rs4
-rw-r--r--src/tools/clippy/tests/ui/get_unwrap.stderr133
-rw-r--r--src/tools/clippy/tests/ui/if_same_then_else.rs41
-rw-r--r--src/tools/clippy/tests/ui/if_same_then_else.stderr20
-rw-r--r--src/tools/clippy/tests/ui/if_same_then_else2.rs2
-rw-r--r--src/tools/clippy/tests/ui/if_same_then_else2.stderr21
-rw-r--r--src/tools/clippy/tests/ui/ifs_same_cond.rs4
-rw-r--r--src/tools/clippy/tests/ui/ifs_same_cond.stderr4
-rw-r--r--src/tools/clippy/tests/ui/ignored_unit_patterns.fixed17
-rw-r--r--src/tools/clippy/tests/ui/ignored_unit_patterns.rs17
-rw-r--r--src/tools/clippy/tests/ui/ignored_unit_patterns.stderr28
-rw-r--r--src/tools/clippy/tests/ui/incorrect_clone_impl_on_copy_type.stderr4
-rw-r--r--src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.fixed145
-rw-r--r--src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.rs149
-rw-r--r--src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.stderr31
-rw-r--r--src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type_fully_qual.rs51
-rw-r--r--src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type_fully_qual.stderr31
-rw-r--r--src/tools/clippy/tests/ui/infallible_destructuring_match.stderr8
-rw-r--r--src/tools/clippy/tests/ui/inherent_to_string.rs26
-rw-r--r--src/tools/clippy/tests/ui/inherent_to_string.stderr10
-rw-r--r--src/tools/clippy/tests/ui/issue-7447.rs5
-rw-r--r--src/tools/clippy/tests/ui/issue-7447.stderr4
-rw-r--r--src/tools/clippy/tests/ui/iter_cloned_collect.fixed3
-rw-r--r--src/tools/clippy/tests/ui/iter_cloned_collect.rs3
-rw-r--r--src/tools/clippy/tests/ui/iter_cloned_collect.stderr10
-rw-r--r--src/tools/clippy/tests/ui/iter_overeager_cloned.stderr14
-rw-r--r--src/tools/clippy/tests/ui/iter_skip_zero.fixed25
-rw-r--r--src/tools/clippy/tests/ui/iter_skip_zero.rs25
-rw-r--r--src/tools/clippy/tests/ui/iter_skip_zero.stderr43
-rw-r--r--src/tools/clippy/tests/ui/iter_with_drain.stderr12
-rw-r--r--src/tools/clippy/tests/ui/let_and_return.rs71
-rw-r--r--src/tools/clippy/tests/ui/let_and_return.stderr22
-rw-r--r--src/tools/clippy/tests/ui/let_underscore_lock.rs3
-rw-r--r--src/tools/clippy/tests/ui/let_underscore_lock.stderr8
-rw-r--r--src/tools/clippy/tests/ui/let_underscore_untyped.rs3
-rw-r--r--src/tools/clippy/tests/ui/let_underscore_untyped.stderr20
-rw-r--r--src/tools/clippy/tests/ui/manual_filter.stderr30
-rw-r--r--src/tools/clippy/tests/ui/manual_filter_map.fixed24
-rw-r--r--src/tools/clippy/tests/ui/manual_filter_map.rs28
-rw-r--r--src/tools/clippy/tests/ui/manual_filter_map.stderr73
-rw-r--r--src/tools/clippy/tests/ui/manual_find_map.stderr53
-rw-r--r--src/tools/clippy/tests/ui/manual_float_methods.rs55
-rw-r--r--src/tools/clippy/tests/ui/manual_float_methods.stderr80
-rw-r--r--src/tools/clippy/tests/ui/manual_let_else.rs8
-rw-r--r--src/tools/clippy/tests/ui/manual_let_else.stderr2
-rw-r--r--src/tools/clippy/tests/ui/manual_let_else_question_mark.fixed63
-rw-r--r--src/tools/clippy/tests/ui/manual_let_else_question_mark.rs68
-rw-r--r--src/tools/clippy/tests/ui/manual_let_else_question_mark.stderr55
-rw-r--r--src/tools/clippy/tests/ui/manual_map_option.stderr42
-rw-r--r--src/tools/clippy/tests/ui/manual_map_option_2.stderr10
-rw-r--r--src/tools/clippy/tests/ui/manual_range_patterns.fixed4
-rw-r--r--src/tools/clippy/tests/ui/manual_range_patterns.rs4
-rw-r--r--src/tools/clippy/tests/ui/manual_range_patterns.stderr16
-rw-r--r--src/tools/clippy/tests/ui/manual_retain.fixed7
-rw-r--r--src/tools/clippy/tests/ui/manual_retain.rs7
-rw-r--r--src/tools/clippy/tests/ui/manual_retain.stderr38
-rw-r--r--src/tools/clippy/tests/ui/manual_split_once.stderr28
-rw-r--r--src/tools/clippy/tests/ui/manual_str_repeat.stderr20
-rw-r--r--src/tools/clippy/tests/ui/manual_try_fold.rs4
-rw-r--r--src/tools/clippy/tests/ui/manual_try_fold.stderr8
-rw-r--r--src/tools/clippy/tests/ui/map_collect_result_unit.stderr4
-rw-r--r--src/tools/clippy/tests/ui/map_unwrap_or.stderr2
-rw-r--r--src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr4
-rw-r--r--src/tools/clippy/tests/ui/match_as_ref.fixed4
-rw-r--r--src/tools/clippy/tests/ui/match_as_ref.rs4
-rw-r--r--src/tools/clippy/tests/ui/match_as_ref.stderr8
-rw-r--r--src/tools/clippy/tests/ui/match_expr_like_matches_macro.fixed3
-rw-r--r--src/tools/clippy/tests/ui/match_expr_like_matches_macro.rs3
-rw-r--r--src/tools/clippy/tests/ui/match_expr_like_matches_macro.stderr56
-rw-r--r--src/tools/clippy/tests/ui/match_on_vec_items.stderr16
-rw-r--r--src/tools/clippy/tests/ui/match_ref_pats.stderr4
-rw-r--r--src/tools/clippy/tests/ui/match_same_arms2.stderr2
-rw-r--r--src/tools/clippy/tests/ui/match_wildcard_for_single_variants.stderr20
-rw-r--r--src/tools/clippy/tests/ui/methods.rs5
-rw-r--r--src/tools/clippy/tests/ui/methods.stderr4
-rw-r--r--src/tools/clippy/tests/ui/methods_fixable.stderr2
-rw-r--r--src/tools/clippy/tests/ui/methods_unfixable.rs10
-rw-r--r--src/tools/clippy/tests/ui/methods_unfixable.stderr15
-rw-r--r--src/tools/clippy/tests/ui/min_ident_chars.rs7
-rw-r--r--src/tools/clippy/tests/ui/min_ident_chars.stderr58
-rw-r--r--src/tools/clippy/tests/ui/min_max.rs4
-rw-r--r--src/tools/clippy/tests/ui/min_max.stderr26
-rw-r--r--src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs1
-rw-r--r--src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr8
-rw-r--r--src/tools/clippy/tests/ui/missing_doc.rs4
-rw-r--r--src/tools/clippy/tests/ui/missing_spin_loop.stderr12
-rw-r--r--src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr2
-rw-r--r--src/tools/clippy/tests/ui/must_use_candidates.fixed7
-rw-r--r--src/tools/clippy/tests/ui/must_use_candidates.rs7
-rw-r--r--src/tools/clippy/tests/ui/must_use_candidates.stderr10
-rw-r--r--src/tools/clippy/tests/ui/mut_from_ref.rs2
-rw-r--r--src/tools/clippy/tests/ui/mut_key.rs3
-rw-r--r--src/tools/clippy/tests/ui/mut_key.stderr34
-rw-r--r--src/tools/clippy/tests/ui/mut_mut.rs7
-rw-r--r--src/tools/clippy/tests/ui/mut_mut.stderr18
-rw-r--r--src/tools/clippy/tests/ui/mut_reference.rs15
-rw-r--r--src/tools/clippy/tests/ui/mut_reference.stderr6
-rw-r--r--src/tools/clippy/tests/ui/needless_borrow_pat.stderr24
-rw-r--r--src/tools/clippy/tests/ui/needless_else.stderr2
-rw-r--r--src/tools/clippy/tests/ui/needless_if.fixed3
-rw-r--r--src/tools/clippy/tests/ui/needless_if.rs3
-rw-r--r--src/tools/clippy/tests/ui/needless_if.stderr14
-rw-r--r--src/tools/clippy/tests/ui/needless_option_as_deref.stderr6
-rw-r--r--src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs254
-rw-r--r--src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr110
-rw-r--r--src/tools/clippy/tests/ui/needless_return_with_question_mark.fixed40
-rw-r--r--src/tools/clippy/tests/ui/needless_return_with_question_mark.rs40
-rw-r--r--src/tools/clippy/tests/ui/needless_return_with_question_mark.stderr10
-rw-r--r--src/tools/clippy/tests/ui/needless_splitn.stderr26
-rw-r--r--src/tools/clippy/tests/ui/numbered_fields.stderr4
-rw-r--r--src/tools/clippy/tests/ui/option_env_unwrap.rs1
-rw-r--r--src/tools/clippy/tests/ui/option_env_unwrap.stderr18
-rw-r--r--src/tools/clippy/tests/ui/option_if_let_else.fixed3
-rw-r--r--src/tools/clippy/tests/ui/option_if_let_else.rs3
-rw-r--r--src/tools/clippy/tests/ui/option_if_let_else.stderr46
-rw-r--r--src/tools/clippy/tests/ui/option_map_unit_fn_fixable.stderr38
-rw-r--r--src/tools/clippy/tests/ui/or_fun_call.fixed66
-rw-r--r--src/tools/clippy/tests/ui/or_fun_call.rs58
-rw-r--r--src/tools/clippy/tests/ui/or_fun_call.stderr178
-rw-r--r--src/tools/clippy/tests/ui/or_then_unwrap.stderr6
-rw-r--r--src/tools/clippy/tests/ui/panic_in_result_fn.stderr74
-rw-r--r--src/tools/clippy/tests/ui/panic_in_result_fn_assertions.stderr12
-rw-r--r--src/tools/clippy/tests/ui/print_literal.stderr24
-rw-r--r--src/tools/clippy/tests/ui/ptr_arg.rs16
-rw-r--r--src/tools/clippy/tests/ui/ptr_arg.stderr46
-rw-r--r--src/tools/clippy/tests/ui/ptr_as_ptr.fixed16
-rw-r--r--src/tools/clippy/tests/ui/ptr_as_ptr.rs16
-rw-r--r--src/tools/clippy/tests/ui/ptr_as_ptr.stderr28
-rw-r--r--src/tools/clippy/tests/ui/question_mark.fixed17
-rw-r--r--src/tools/clippy/tests/ui/question_mark.rs17
-rw-r--r--src/tools/clippy/tests/ui/question_mark.stderr6
-rw-r--r--src/tools/clippy/tests/ui/range_contains.fixed2
-rw-r--r--src/tools/clippy/tests/ui/range_contains.rs2
-rw-r--r--src/tools/clippy/tests/ui/range_contains.stderr42
-rw-r--r--src/tools/clippy/tests/ui/read_line_without_trim.fixed36
-rw-r--r--src/tools/clippy/tests/ui/read_line_without_trim.rs36
-rw-r--r--src/tools/clippy/tests/ui/read_line_without_trim.stderr73
-rw-r--r--src/tools/clippy/tests/ui/read_zero_byte_vec.rs6
-rw-r--r--src/tools/clippy/tests/ui/read_zero_byte_vec.stderr20
-rw-r--r--src/tools/clippy/tests/ui/readonly_write_lock.rs42
-rw-r--r--src/tools/clippy/tests/ui/readonly_write_lock.stderr16
-rw-r--r--src/tools/clippy/tests/ui/redundant_allocation.rs9
-rw-r--r--src/tools/clippy/tests/ui/redundant_allocation.stderr40
-rw-r--r--src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed12
-rw-r--r--src/tools/clippy/tests/ui/redundant_allocation_fixable.rs12
-rw-r--r--src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr24
-rw-r--r--src/tools/clippy/tests/ui/redundant_guards.fixed146
-rw-r--r--src/tools/clippy/tests/ui/redundant_guards.rs146
-rw-r--r--src/tools/clippy/tests/ui/redundant_guards.stderr98
-rw-r--r--src/tools/clippy/tests/ui/redundant_locals.rs120
-rw-r--r--src/tools/clippy/tests/ui/redundant_locals.stderr136
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.stderr44
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed6
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs6
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr72
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_option.fixed14
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_option.rs14
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr128
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr36
-rw-r--r--src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr56
-rw-r--r--src/tools/clippy/tests/ui/ref_binding_to_reference.stderr14
-rw-r--r--src/tools/clippy/tests/ui/rename.fixed6
-rw-r--r--src/tools/clippy/tests/ui/rename.rs6
-rw-r--r--src/tools/clippy/tests/ui/rename.stderr118
-rw-r--r--src/tools/clippy/tests/ui/result_map_or_into_option.fixed2
-rw-r--r--src/tools/clippy/tests/ui/result_map_or_into_option.rs2
-rw-r--r--src/tools/clippy/tests/ui/result_map_unit_fn_fixable.stderr36
-rw-r--r--src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr12
-rw-r--r--src/tools/clippy/tests/ui/self_assignment.rs4
-rw-r--r--src/tools/clippy/tests/ui/self_assignment.stderr6
-rw-r--r--src/tools/clippy/tests/ui/semicolon_if_nothing_returned.fixed123
-rw-r--r--src/tools/clippy/tests/ui/semicolon_if_nothing_returned.rs7
-rw-r--r--src/tools/clippy/tests/ui/semicolon_if_nothing_returned.stderr10
-rw-r--r--src/tools/clippy/tests/ui/shadow.rs7
-rw-r--r--src/tools/clippy/tests/ui/shadow.stderr92
-rw-r--r--src/tools/clippy/tests/ui/significant_drop_in_scrutinee.rs3
-rw-r--r--src/tools/clippy/tests/ui/significant_drop_in_scrutinee.stderr52
-rw-r--r--src/tools/clippy/tests/ui/significant_drop_tightening.fixed50
-rw-r--r--src/tools/clippy/tests/ui/significant_drop_tightening.rs50
-rw-r--r--src/tools/clippy/tests/ui/significant_drop_tightening.stderr6
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports.fixed4
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports.rs4
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports.stderr4
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports_nested_first.rs2
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports_nested_first.stderr4
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports_self_after.rs1
-rw-r--r--src/tools/clippy/tests/ui/single_component_path_imports_self_before.rs1
-rw-r--r--src/tools/clippy/tests/ui/single_match.fixed1
-rw-r--r--src/tools/clippy/tests/ui/single_match.rs1
-rw-r--r--src/tools/clippy/tests/ui/single_match.stderr72
-rw-r--r--src/tools/clippy/tests/ui/single_match_else.stderr18
-rw-r--r--src/tools/clippy/tests/ui/slow_vector_initialization.rs16
-rw-r--r--src/tools/clippy/tests/ui/slow_vector_initialization.stderr62
-rw-r--r--src/tools/clippy/tests/ui/string_extend.stderr8
-rw-r--r--src/tools/clippy/tests/ui/string_lit_chars_any.fixed50
-rw-r--r--src/tools/clippy/tests/ui/string_lit_chars_any.rs50
-rw-r--r--src/tools/clippy/tests/ui/string_lit_chars_any.stderr58
-rw-r--r--src/tools/clippy/tests/ui/strlen_on_c_strings.stderr14
-rw-r--r--src/tools/clippy/tests/ui/suspicious_xor_used_as_pow.stderr10
-rw-r--r--src/tools/clippy/tests/ui/swap.fixed3
-rw-r--r--src/tools/clippy/tests/ui/swap.rs3
-rw-r--r--src/tools/clippy/tests/ui/swap.stderr34
-rw-r--r--src/tools/clippy/tests/ui/to_digit_is_some.stderr4
-rw-r--r--src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.rs3
-rw-r--r--src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.stderr36
-rw-r--r--src/tools/clippy/tests/ui/try_err.fixed6
-rw-r--r--src/tools/clippy/tests/ui/try_err.rs6
-rw-r--r--src/tools/clippy/tests/ui/try_err.stderr44
-rw-r--r--src/tools/clippy/tests/ui/tuple_array_conversions.rs30
-rw-r--r--src/tools/clippy/tests/ui/tuple_array_conversions.stderr36
-rw-r--r--src/tools/clippy/tests/ui/type_id_on_box.fixed40
-rw-r--r--src/tools/clippy/tests/ui/type_id_on_box.rs40
-rw-r--r--src/tools/clippy/tests/ui/type_id_on_box.stderr36
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_cast.fixed17
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_cast.rs17
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_cast.stderr88
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_cast_unfixable.rs22
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_cast_unfixable.stderr16
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_clone.stderr12
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_filter_map.rs6
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_filter_map.stderr8
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_find_map.rs6
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_find_map.stderr8
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_literal_unwrap.fixed21
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_literal_unwrap.rs21
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_literal_unwrap.stderr86
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_to_owned.fixed3
-rw-r--r--src/tools/clippy/tests/ui/unnecessary_to_owned.rs3
-rw-r--r--src/tools/clippy/tests/ui/unsafe_removed_from_name.rs8
-rw-r--r--src/tools/clippy/tests/ui/unsafe_removed_from_name.stderr16
-rw-r--r--src/tools/clippy/tests/ui/unused_async.rs17
-rw-r--r--src/tools/clippy/tests/ui/unused_async.stderr14
-rw-r--r--src/tools/clippy/tests/ui/unused_io_amount.rs2
-rw-r--r--src/tools/clippy/tests/ui/unused_peekable.rs3
-rw-r--r--src/tools/clippy/tests/ui/unused_peekable.stderr16
-rw-r--r--src/tools/clippy/tests/ui/unwrap.stderr9
-rw-r--r--src/tools/clippy/tests/ui/unwrap_expect_used.rs11
-rw-r--r--src/tools/clippy/tests/ui/unwrap_expect_used.stderr24
-rw-r--r--src/tools/clippy/tests/ui/unwrap_or.stderr4
-rw-r--r--src/tools/clippy/tests/ui/unwrap_or_else_default.fixed62
-rw-r--r--src/tools/clippy/tests/ui/unwrap_or_else_default.rs62
-rw-r--r--src/tools/clippy/tests/ui/unwrap_or_else_default.stderr100
-rw-r--r--src/tools/clippy/tests/ui/useless_asref.fixed6
-rw-r--r--src/tools/clippy/tests/ui/useless_asref.rs6
-rw-r--r--src/tools/clippy/tests/ui/useless_asref.stderr44
-rw-r--r--src/tools/clippy/tests/ui/vec.fixed40
-rw-r--r--src/tools/clippy/tests/ui/vec.rs40
-rw-r--r--src/tools/clippy/tests/ui/vec.stderr24
-rw-r--r--src/tools/clippy/tests/ui/wildcard_enum_match_arm.stderr12
-rw-r--r--src/tools/clippy/tests/ui/wildcard_imports.fixed1
-rw-r--r--src/tools/clippy/tests/ui/wildcard_imports.rs1
-rw-r--r--src/tools/clippy/tests/ui/wildcard_imports.stderr24
-rw-r--r--src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.fixed1
-rw-r--r--src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.stderr24
-rw-r--r--src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.fixed1
-rw-r--r--src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.stderr24
-rw-r--r--src/tools/clippy/tests/ui/wildcard_imports_2021.rs1
-rw-r--r--src/tools/clippy/tests/ui/write_literal.stderr24
-rw-r--r--src/tools/clippy/tests/ui/write_literal_2.stderr28
-rw-r--r--src/tools/clippy/triagebot.toml5
-rwxr-xr-xsrc/tools/clippy/util/fetch_prs_between.sh22
-rw-r--r--src/tools/collect-license-metadata/src/path_tree.rs5
-rw-r--r--src/tools/compiletest/Cargo.toml2
-rw-r--r--src/tools/compiletest/src/header.rs47
-rw-r--r--src/tools/compiletest/src/header/cfg.rs4
-rw-r--r--src/tools/compiletest/src/lib.rs2
-rw-r--r--src/tools/compiletest/src/runtest.rs153
-rw-r--r--src/tools/compiletest/src/runtest/debugger.rs148
-rw-r--r--src/tools/compiletest/src/runtest/tests.rs8
-rw-r--r--src/tools/jsondocck/src/cache.rs6
-rw-r--r--src/tools/jsondoclint/src/main.rs11
-rw-r--r--src/tools/linkchecker/main.rs1
-rw-r--r--src/tools/opt-dist/Cargo.toml23
-rw-r--r--src/tools/opt-dist/README.md7
-rw-r--r--src/tools/opt-dist/src/bolt.rs103
-rw-r--r--src/tools/opt-dist/src/environment/linux.rs58
-rw-r--r--src/tools/opt-dist/src/environment/mod.rs77
-rw-r--r--src/tools/opt-dist/src/environment/windows.rs82
-rw-r--r--src/tools/opt-dist/src/exec.rs179
-rw-r--r--src/tools/opt-dist/src/main.rs215
-rw-r--r--src/tools/opt-dist/src/metrics.rs106
-rw-r--r--src/tools/opt-dist/src/tests.rs114
-rw-r--r--src/tools/opt-dist/src/timer.rs167
-rw-r--r--src/tools/opt-dist/src/training.rs223
-rw-r--r--src/tools/opt-dist/src/utils/io.rs88
-rw-r--r--src/tools/opt-dist/src/utils/mod.rs75
-rw-r--r--src/tools/rust-analyzer/.editorconfig13
-rw-r--r--src/tools/rust-analyzer/Cargo.lock472
-rw-r--r--src/tools/rust-analyzer/Cargo.toml24
-rw-r--r--src/tools/rust-analyzer/crates/base-db/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/fixture.rs114
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/input.rs6
-rw-r--r--src/tools/rust-analyzer/crates/cfg/Cargo.toml6
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/lib.rs4
-rw-r--r--src/tools/rust-analyzer/crates/flycheck/Cargo.toml4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/Cargo.toml8
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/attr.rs42
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs71
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs14
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs77
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs23
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/data.rs47
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs131
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/db.rs26
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/expander.rs87
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/find_path.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/generics.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/hir.rs11
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/import_map.rs645
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs25
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs20
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lib.rs171
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lower.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs76
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs22
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs91
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs53
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres.rs21
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs120
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs15
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs124
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs18
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/pretty.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/resolver.rs68
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/Cargo.toml6
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs79
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs196
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs14
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/db.rs478
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/eager.rs237
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs50
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/lib.rs249
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/name.rs23
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/Cargo.toml12
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs13
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs68
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs3
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs25
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs453
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs355
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/db.rs24
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs77
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/display.rs115
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer.rs44
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs46
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs54
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs47
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs20
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs21
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout.rs152
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs28
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs98
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lib.rs11
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lower.rs10
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs54
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir.rs41
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs19
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs1636
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs651
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs177
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs118
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs689
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs77
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs48
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs38
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests.rs9
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs19
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs67
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs26
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs22
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs68
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/traits.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/utils.rs14
-rw-r--r--src/tools/rust-analyzer/crates/hir/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/attrs.rs41
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/db.rs11
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/diagnostics.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/display.rs26
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/from_id.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/has_source.rs36
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/lib.rs250
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics.rs31
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs23
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs184
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs158
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs75
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs125
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs164
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs38
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs113
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs38
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs166
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs1051
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs106
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs (renamed from src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs)438
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs175
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs429
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs143
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs3
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs22
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs56
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs739
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs31
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/lib.rs15
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests.rs93
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs140
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/utils.rs15
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs3
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/context.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/item.rs21
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs23
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/defs.rs53
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/helpers.rs18
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs60
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/lib.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/rename.rs18
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/search.rs18
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/source_change.rs233
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml1
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs40
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs208
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs21
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs181
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs21
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs41
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs14
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs21
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs29
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs75
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs262
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs18
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/search.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links.rs11
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs35
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_definition.rs12
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs88
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/highlight_related.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover.rs17
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/render.rs23
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/tests.rs89
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs12
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/interpret_function.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/lib.rs13
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/moniker.rs31
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/navigation_target.rs37
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/references.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/rename.rs327
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/runnables.rs61
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/ssr.rs57
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/static_index.rs72
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs23
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs36
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs29
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html6
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html5
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html17
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html16
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs27
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs409
-rw-r--r--src/tools/rust-analyzer/crates/intern/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/load-cargo/Cargo.toml25
-rw-r--r--src/tools/rust-analyzer/crates/load-cargo/src/lib.rs441
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/benchmark.rs11
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander.rs10
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs12
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs42
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/lib.rs84
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs148
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/token_map.rs11
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar.rs136
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs19
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/lib.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/shortcuts.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rast48
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rast44
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast63
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs1
-rw-r--r--src/tools/rust-analyzer/crates/paths/src/lib.rs15
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs11
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml4
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs21
-rw-r--r--src/tools/rust-analyzer/crates/profile/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/tree.rs2
-rw-r--r--src/tools/rust-analyzer/crates/project-model/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs17
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs27
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/lib.rs23
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs8
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs6
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/sysroot.rs22
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs3
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/workspace.rs92
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml13
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs94
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs64
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs18
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs166
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs12
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs32
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs205
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs59
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs89
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs72
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs15
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs1
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs30
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs27
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs12
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs28
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs260
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs2
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs13
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs27
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs18
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs301
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs1
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs621
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs26
-rw-r--r--src/tools/rust-analyzer/crates/sourcegen/src/lib.rs4
-rw-r--r--src/tools/rust-analyzer/crates/stdx/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/syntax/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/syntax/rust.ungram6
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs85
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs30
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/make.rs93
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs61
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/lib.rs103
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs1
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/fixture.rs95
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/minicore.rs69
-rw-r--r--src/tools/rust-analyzer/crates/tt/src/buffer.rs6
-rw-r--r--src/tools/rust-analyzer/crates/tt/src/lib.rs18
-rw-r--r--src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml3
-rw-r--r--src/tools/rust-analyzer/crates/vfs/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs8
-rw-r--r--src/tools/rust-analyzer/docs/dev/lsp-extensions.md47
-rw-r--r--src/tools/rust-analyzer/docs/dev/style.md13
-rw-r--r--src/tools/rust-analyzer/docs/user/manual.adoc93
-rw-r--r--src/tools/rust-analyzer/lib/README.md9
-rw-r--r--src/tools/rust-analyzer/lib/la-arena/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/lib/line-index/Cargo.toml6
-rw-r--r--src/tools/rust-analyzer/lib/line-index/src/lib.rs218
-rw-r--r--src/tools/rust-analyzer/lib/line-index/src/tests.rs129
-rw-r--r--src/tools/rust-analyzer/lib/line-index/tests/it.rs62
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/Cargo.toml6
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/msg.rs2
-rw-r--r--src/tools/rust-analyzer/triagebot.toml4
-rw-r--r--src/tools/rust-analyzer/xtask/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/xtask/src/flags.rs29
-rw-r--r--src/tools/rust-analyzer/xtask/src/install.rs10
-rw-r--r--src/tools/rust-analyzer/xtask/src/metrics.rs76
-rw-r--r--src/tools/rust-analyzer/xtask/src/publish.rs11
-rw-r--r--src/tools/rust-installer/install-template.sh2
-rw-r--r--src/tools/rustfmt/src/expr.rs12
-rw-r--r--src/tools/rustfmt/src/macros.rs28
-rw-r--r--src/tools/rustfmt/src/matches.rs2
-rw-r--r--src/tools/rustfmt/src/parse/macros/mod.rs2
-rw-r--r--src/tools/rustfmt/src/parse/session.rs35
-rw-r--r--src/tools/rustfmt/src/test/mod.rs4
-rw-r--r--src/tools/rustfmt/src/utils.rs4
-rw-r--r--src/tools/tidy/config/black.toml15
-rw-r--r--src/tools/tidy/config/requirements.in10
-rw-r--r--src/tools/tidy/config/requirements.txt117
-rw-r--r--src/tools/tidy/config/ruff.toml41
-rw-r--r--src/tools/tidy/src/deps.rs29
-rw-r--r--src/tools/tidy/src/ext_tool_checks.rs435
-rw-r--r--src/tools/tidy/src/features.rs1
-rw-r--r--src/tools/tidy/src/fluent_alphabetical.rs2
-rw-r--r--src/tools/tidy/src/lib.rs1
-rw-r--r--src/tools/tidy/src/main.rs13
-rw-r--r--src/tools/tidy/src/pal.rs6
-rw-r--r--src/tools/tidy/src/style.rs4
-rw-r--r--src/tools/tidy/src/ui_tests.rs10
1795 files changed, 42785 insertions, 15164 deletions
diff --git a/src/tools/build-manifest/Cargo.toml b/src/tools/build-manifest/Cargo.toml
index 6c3b5bb00..7e0c4bee2 100644
--- a/src/tools/build-manifest/Cargo.toml
+++ b/src/tools/build-manifest/Cargo.toml
@@ -8,7 +8,7 @@ toml = "0.5"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
anyhow = "1.0.32"
-flate2 = "1.0.16"
+flate2 = "1.0.26"
xz2 = "0.1.7"
tar = "0.4.29"
sha2 = "0.10.1"
diff --git a/src/tools/build-manifest/src/main.rs b/src/tools/build-manifest/src/main.rs
index 8b28c68e0..778609da0 100644
--- a/src/tools/build-manifest/src/main.rs
+++ b/src/tools/build-manifest/src/main.rs
@@ -100,6 +100,7 @@ static TARGETS: &[&str] = &[
"i686-unknown-uefi",
"loongarch64-unknown-linux-gnu",
"m68k-unknown-linux-gnu",
+ "csky-unknown-linux-gnuabiv2",
"mips-unknown-linux-gnu",
"mips-unknown-linux-musl",
"mips64-unknown-linux-gnuabi64",
@@ -122,11 +123,13 @@ static TARGETS: &[&str] = &[
"riscv32imac-unknown-none-elf",
"riscv32gc-unknown-linux-gnu",
"riscv64imac-unknown-none-elf",
+ "riscv64gc-unknown-hermit",
"riscv64gc-unknown-none-elf",
"riscv64gc-unknown-linux-gnu",
"s390x-unknown-linux-gnu",
"sparc64-unknown-linux-gnu",
"sparcv9-sun-solaris",
+ "sparc-unknown-none-elf",
"thumbv6m-none-eabi",
"thumbv7em-none-eabi",
"thumbv7em-none-eabihf",
@@ -137,6 +140,7 @@ static TARGETS: &[&str] = &[
"wasm32-unknown-emscripten",
"wasm32-unknown-unknown",
"wasm32-wasi",
+ "wasm32-wasi-preview1-threads",
"x86_64-apple-darwin",
"x86_64-apple-ios",
"x86_64-fortanix-unknown-sgx",
@@ -146,6 +150,7 @@ static TARGETS: &[&str] = &[
"x86_64-pc-windows-msvc",
"x86_64-sun-solaris",
"x86_64-pc-solaris",
+ "x86_64-unikraft-linux-musl",
"x86_64-unknown-freebsd",
"x86_64-unknown-illumos",
"x86_64-unknown-linux-gnu",
diff --git a/src/tools/build_helper/Cargo.toml b/src/tools/build_helper/Cargo.toml
index 99f6fea2e..66894e1ab 100644
--- a/src/tools/build_helper/Cargo.toml
+++ b/src/tools/build_helper/Cargo.toml
@@ -6,3 +6,5 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
+serde = "1"
+serde_derive = "1"
diff --git a/src/tools/build_helper/src/ci.rs b/src/tools/build_helper/src/ci.rs
index 893195b69..a8505ec95 100644
--- a/src/tools/build_helper/src/ci.rs
+++ b/src/tools/build_helper/src/ci.rs
@@ -36,15 +36,26 @@ impl CiEnv {
}
pub mod gha {
+ use std::sync::Mutex;
+
+ static ACTIVE_GROUPS: Mutex<Vec<String>> = Mutex::new(Vec::new());
+
/// All github actions log messages from this call to the Drop of the return value
- /// will be grouped and hidden by default in logs. Note that nesting these does
- /// not really work.
+ /// will be grouped and hidden by default in logs. Note that since github actions doesn't
+ /// support group nesting, any active group will be first finished when a subgroup is started,
+ /// and then re-started when the subgroup finishes.
+ #[track_caller]
pub fn group(name: impl std::fmt::Display) -> Group {
- if std::env::var_os("GITHUB_ACTIONS").is_some() {
- eprintln!("::group::{name}");
- } else {
- eprintln!("{name}")
+ let mut groups = ACTIVE_GROUPS.lock().unwrap();
+
+ // A group is currently active. End it first to avoid nesting.
+ if !groups.is_empty() {
+ end_group();
}
+
+ let name = name.to_string();
+ start_group(&name);
+ groups.push(name);
Group(())
}
@@ -54,9 +65,36 @@ pub mod gha {
impl Drop for Group {
fn drop(&mut self) {
- if std::env::var_os("GITHUB_ACTIONS").is_some() {
- eprintln!("::endgroup::");
+ end_group();
+
+ let mut groups = ACTIVE_GROUPS.lock().unwrap();
+ // Remove the current group
+ groups.pop();
+
+ // If there was some previous group, restart it
+ if is_in_gha() {
+ if let Some(name) = groups.last() {
+ start_group(format!("{name} (continued)"));
+ }
}
}
}
+
+ fn start_group(name: impl std::fmt::Display) {
+ if is_in_gha() {
+ eprintln!("::group::{name}");
+ } else {
+ eprintln!("{name}")
+ }
+ }
+
+ fn end_group() {
+ if is_in_gha() {
+ eprintln!("::endgroup::");
+ }
+ }
+
+ fn is_in_gha() -> bool {
+ std::env::var_os("GITHUB_ACTIONS").is_some()
+ }
}
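The doc comment added above describes how group nesting is flattened: GitHub Actions has no native nested groups, so starting a subgroup first ends the active group, and when the subgroup's guard is dropped the outer group is restarted with a "(continued)" suffix. A minimal sketch of that behaviour, assuming a binary that depends on build_helper and uses build_helper::ci::gha (illustrative only, not part of this patch):

    use build_helper::ci::gha;

    fn main() {
        // Emits `::group::outer` when running under GitHub Actions,
        // or just prints the name otherwise.
        let _outer = gha::group("outer");

        {
            // No nesting support: this first ends `outer`, then starts `inner`.
            let _inner = gha::group("inner");
            eprintln!("work inside the inner group");
        } // dropping `_inner` ends `inner` and (on GHA) restarts `outer (continued)`

        eprintln!("work back in the outer group");
    } // dropping `_outer` ends the last group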
diff --git a/src/tools/build_helper/src/git.rs b/src/tools/build_helper/src/git.rs
index 168633c8f..66876e02c 100644
--- a/src/tools/build_helper/src/git.rs
+++ b/src/tools/build_helper/src/git.rs
@@ -94,7 +94,9 @@ pub fn get_git_modified_files(
git_dir: Option<&Path>,
extensions: &Vec<&str>,
) -> Result<Option<Vec<String>>, String> {
- let Ok(updated_master) = updated_master_branch(git_dir) else { return Ok(None); };
+ let Ok(updated_master) = updated_master_branch(git_dir) else {
+ return Ok(None);
+ };
let git = || {
let mut git = Command::new("git");
@@ -119,7 +121,9 @@ pub fn get_git_modified_files(
/// Returns the files that haven't been added to git yet.
pub fn get_git_untracked_files(git_dir: Option<&Path>) -> Result<Option<Vec<String>>, String> {
- let Ok(_updated_master) = updated_master_branch(git_dir) else { return Ok(None); };
+ let Ok(_updated_master) = updated_master_branch(git_dir) else {
+ return Ok(None);
+ };
let mut git = Command::new("git");
if let Some(git_dir) = git_dir {
git.current_dir(git_dir);
diff --git a/src/tools/build_helper/src/lib.rs b/src/tools/build_helper/src/lib.rs
index 3fa970373..575f36771 100644
--- a/src/tools/build_helper/src/lib.rs
+++ b/src/tools/build_helper/src/lib.rs
@@ -1,3 +1,4 @@
pub mod ci;
pub mod git;
+pub mod metrics;
pub mod util;
diff --git a/src/tools/build_helper/src/metrics.rs b/src/tools/build_helper/src/metrics.rs
new file mode 100644
index 000000000..2d0c66a8f
--- /dev/null
+++ b/src/tools/build_helper/src/metrics.rs
@@ -0,0 +1,92 @@
+use serde_derive::{Deserialize, Serialize};
+
+#[derive(Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub struct JsonRoot {
+ #[serde(default)] // For version 0 the field was not present.
+ pub format_version: usize,
+ pub system_stats: JsonInvocationSystemStats,
+ pub invocations: Vec<JsonInvocation>,
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub struct JsonInvocation {
+ // Unix timestamp in seconds
+ //
+ // This is necessary to easily correlate this invocation with logs or other data.
+ pub start_time: u64,
+ pub duration_including_children_sec: f64,
+ pub children: Vec<JsonNode>,
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(tag = "kind", rename_all = "snake_case")]
+pub enum JsonNode {
+ RustbuildStep {
+ #[serde(rename = "type")]
+ type_: String,
+ debug_repr: String,
+
+ duration_excluding_children_sec: f64,
+ system_stats: JsonStepSystemStats,
+
+ children: Vec<JsonNode>,
+ },
+ TestSuite(TestSuite),
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct TestSuite {
+ pub metadata: TestSuiteMetadata,
+ pub tests: Vec<Test>,
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(tag = "kind", rename_all = "snake_case")]
+pub enum TestSuiteMetadata {
+ CargoPackage {
+ crates: Vec<String>,
+ target: String,
+ host: String,
+ stage: u32,
+ },
+ Compiletest {
+ suite: String,
+ mode: String,
+ compare_mode: Option<String>,
+ target: String,
+ host: String,
+ stage: u32,
+ },
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct Test {
+ pub name: String,
+ #[serde(flatten)]
+ pub outcome: TestOutcome,
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(tag = "outcome", rename_all = "snake_case")]
+pub enum TestOutcome {
+ Passed,
+ Failed,
+ Ignored { ignore_reason: Option<String> },
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub struct JsonInvocationSystemStats {
+ pub cpu_threads_count: usize,
+ pub cpu_model: String,
+
+ pub memory_total_bytes: u64,
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub struct JsonStepSystemStats {
+ pub cpu_utilization_percent: f64,
+}
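The structs added above define the JSON schema for bootstrap's build metrics: a format version, system stats, and per-invocation trees of rustbuild steps and test suites. As a hedged sketch of consuming such a file with serde_json (the path and the serde_json dependency are assumptions for illustration, not part of this patch):

    use build_helper::metrics::{JsonNode, JsonRoot};

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Example path only; the actual location depends on the build configuration.
        let data = std::fs::read_to_string("build/metrics.json")?;
        let root: JsonRoot = serde_json::from_str(&data)?;

        for invocation in &root.invocations {
            println!(
                "invocation at {} took {:.1}s",
                invocation.start_time, invocation.duration_including_children_sec
            );
            for node in &invocation.children {
                if let JsonNode::TestSuite(suite) = node {
                    println!("  test suite with {} tests", suite.tests.len());
                }
            }
        }
        Ok(())
    }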
diff --git a/src/tools/build_helper/src/util.rs b/src/tools/build_helper/src/util.rs
index 11b8a228b..5801a8648 100644
--- a/src/tools/build_helper/src/util.rs
+++ b/src/tools/build_helper/src/util.rs
@@ -1,10 +1,12 @@
use std::process::Command;
/// Invokes `build_helper::util::detail_exit` with `cfg!(test)`
+///
+/// This is a macro instead of a function so that it uses `cfg(test)` in the *calling* crate, not in build helper.
#[macro_export]
-macro_rules! detail_exit_macro {
+macro_rules! exit {
($code:expr) => {
- build_helper::util::detail_exit($code, cfg!(test));
+ $crate::util::detail_exit($code, cfg!(test));
};
}
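The renamed `exit!` macro relies on `cfg!(test)` being expanded in the calling crate, so the same call site exits the process in a normal build but panics under `cargo test`, letting the test harness catch it. A small illustrative usage, assuming a crate that depends on build_helper (not part of this patch):

    use build_helper::exit;

    fn require(ok: bool) {
        if !ok {
            eprintln!("error: requirement not met");
            // Expands to `build_helper::util::detail_exit(1, cfg!(test))`,
            // where `cfg!(test)` reflects the calling crate's configuration.
            exit!(1);
        }
    }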
diff --git a/src/tools/cargo/.cargo/config.toml b/src/tools/cargo/.cargo/config.toml
index c4a595b61..17d424a5d 100644
--- a/src/tools/cargo/.cargo/config.toml
+++ b/src/tools/cargo/.cargo/config.toml
@@ -1,4 +1,4 @@
[alias]
build-man = "run --package xtask-build-man --"
stale-label = "run --package xtask-stale-label --"
-unpublished = "run --package xtask-unpublished --"
+bump-check = "run --package xtask-bump-check --"
diff --git a/src/tools/cargo/.github/renovate.json5 b/src/tools/cargo/.github/renovate.json5
new file mode 100644
index 000000000..8ad9952d2
--- /dev/null
+++ b/src/tools/cargo/.github/renovate.json5
@@ -0,0 +1,38 @@
+{
+ schedule: [
+ 'before 5am on the first day of the month',
+ ],
+ semanticCommits: 'enabled',
+ configMigration: true,
+ dependencyDashboard: false,
+ ignorePaths: [
+ "**/tests/**",
+ ],
+ packageRules: [
+ // Goals:
+ // - Rollup safe upgrades to reduce CI runner load
+ // - Have lockfile and manifest in-sync (implicit rules)
+ {
+ matchManagers: [
+ 'cargo',
+ ],
+ matchCurrentVersion: '>=0.1.0',
+ matchUpdateTypes: [
+ 'patch',
+ ],
+ automerge: false,
+ groupName: 'compatible',
+ },
+ {
+ matchManagers: [
+ 'cargo',
+ ],
+ matchCurrentVersion: '>=1.0.0',
+ matchUpdateTypes: [
+ 'minor',
+ ],
+ automerge: false,
+ groupName: 'compatible',
+ },
+ ],
+}
diff --git a/src/tools/cargo/.github/workflows/main.yml b/src/tools/cargo/.github/workflows/main.yml
index 3deae6355..2e71f14b8 100644
--- a/src/tools/cargo/.github/workflows/main.yml
+++ b/src/tools/cargo/.github/workflows/main.yml
@@ -13,6 +13,41 @@ permissions:
contents: read
jobs:
+ success:
+ permissions:
+ contents: none
+ name: bors build finished
+ needs:
+ - build_std
+ - clippy
+ - docs
+ - lockfile
+ - resolver
+ - rustfmt
+ - test
+ - test_gitoxide
+ runs-on: ubuntu-latest
+ if: "success() && github.event_name == 'push' && github.ref == 'refs/heads/auto-cargo'"
+ steps:
+ - run: echo ok
+ failure:
+ permissions:
+ contents: none
+ name: bors build finished
+ needs:
+ - build_std
+ - clippy
+ - docs
+ - lockfile
+ - resolver
+ - rustfmt
+ - test
+ - test_gitoxide
+ runs-on: ubuntu-latest
+ if: "!success() && github.event_name == 'push' && github.ref == 'refs/heads/auto-cargo'"
+ steps:
+ - run: exit 1
+
# Check Code style quickly by running `rustfmt` over all code
rustfmt:
runs-on: ubuntu-latest
@@ -52,12 +87,18 @@ jobs:
runs-on: ubuntu-latest
env:
BASE_SHA: ${{ github.event.pull_request.base.sha }}
- HEAD_SHA: ${{ github.sha }}
+ HEAD_SHA: ${{ github.event.pull_request.head.sha != '' && github.event.pull_request.head.sha || github.sha }}
steps:
- uses: actions/checkout@v3
with:
- fetch-depth: 0 # make `git diff` work
+ fetch-depth: 0
- run: rustup update stable && rustup default stable
+ - name: Install cargo-semver-checks
+ run: |
+ mkdir installed-bins
+ curl -Lf https://github.com/obi1kenobi/cargo-semver-checks/releases/download/v0.22.1/cargo-semver-checks-x86_64-unknown-linux-gnu.tar.gz \
+ | tar -xz --directory=./installed-bins
+ echo `pwd`/installed-bins >> $GITHUB_PATH
- run: ci/validate-version-bump.sh
test:
@@ -119,6 +160,7 @@ jobs:
- run: cargo test -p cargo
- name: Clear intermediate test output
run: ci/clean-test-output.sh
+
- name: gitoxide tests (all git-related tests)
run: cargo test -p cargo git
env:
@@ -127,22 +169,14 @@ jobs:
# running out of disk space.
- name: Clear test output
run: ci/clean-test-output.sh
+
# This only tests `cargo fix` because fix-proxy-mode is one of the most
# complicated subprocess management in Cargo.
- name: Check operability of rustc invocation with argfile
run: 'cargo test -p cargo --test testsuite -- fix::'
env:
__CARGO_TEST_FORCE_ARGFILE: 1
- - run: cargo test -p cargo-test-support
- - run: cargo test -p cargo-platform
- - run: cargo test -p cargo-util
- - run: cargo test -p home
- - run: cargo test -p mdman
- - run: cargo build -p cargo-credential-1password
- - run: cargo build -p cargo-credential-macos-keychain
- - run: cargo build -p cargo-credential-wincred
- - run: cargo build -p cargo-credential-gnome-secret
- if: matrix.os == 'ubuntu-latest'
+ - run: cargo test --workspace --exclude cargo --exclude benchsuite
- name: Check benchmarks
run: |
# This only tests one benchmark since it can take over 10 minutes to
@@ -153,6 +187,7 @@ jobs:
# running out of disk space.
- name: Clear benchmark output
run: ci/clean-test-output.sh
+
- name: Fetch smoke test
run: ci/fetch-smoke-test.sh
@@ -203,7 +238,7 @@ jobs:
- name: Install mdbook
run: |
mkdir mdbook
- curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.27/mdbook-v0.4.27-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook
+ curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.31/mdbook-v0.4.31-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook
echo `pwd`/mdbook >> $GITHUB_PATH
- run: cd src/doc && mdbook build --dest-dir ../../target/doc
- name: Run linkchecker.sh
@@ -211,38 +246,3 @@ jobs:
cd target
curl -sSLO https://raw.githubusercontent.com/rust-lang/rust/master/src/tools/linkchecker/linkcheck.sh
sh linkcheck.sh --all --path ../src/doc cargo
-
- success:
- permissions:
- contents: none
- name: bors build finished
- needs:
- - build_std
- - clippy
- - docs
- - lockfile
- - resolver
- - rustfmt
- - test
- - test_gitoxide
- runs-on: ubuntu-latest
- if: "success() && github.event_name == 'push' && github.ref == 'refs/heads/auto-cargo'"
- steps:
- - run: echo ok
- failure:
- permissions:
- contents: none
- name: bors build finished
- needs:
- - build_std
- - clippy
- - docs
- - lockfile
- - resolver
- - rustfmt
- - test
- - test_gitoxide
- runs-on: ubuntu-latest
- if: "!success() && github.event_name == 'push' && github.ref == 'refs/heads/auto-cargo'"
- steps:
- - run: exit 1
diff --git a/src/tools/cargo/CHANGELOG.md b/src/tools/cargo/CHANGELOG.md
index 0784b2638..0141302c8 100644
--- a/src/tools/cargo/CHANGELOG.md
+++ b/src/tools/cargo/CHANGELOG.md
@@ -1,16 +1,67 @@
# Changelog
+## Cargo 1.73 (2023-10-05)
+[45782b6b...HEAD](https://github.com/rust-lang/cargo/compare/45782b6b...HEAD)
+
+### Added
+
+### Changed
+
+### Fixed
+
+### Nightly only
+
+- Fixed invalid package names generated by `-Zscript`.
+ [#12349](https://github.com/rust-lang/cargo/pull/12349)
+- `-Zscript` now errors out on unsupported commands — `publish` and `package`.
+ [#12350](https://github.com/rust-lang/cargo/pull/12350)
+
+### Documentation
+
+- Use heading attributes to control the fragment.
+ [#12339](https://github.com/rust-lang/cargo/pull/12339)
+- Use "number" instead of "digit" when explaining Cargo's use of semver.
+ [#12340](https://github.com/rust-lang/cargo/pull/12340)
+- contrib: Add some more detail about how publishing works.
+ [#12344](https://github.com/rust-lang/cargo/pull/12344)
+- Clarify "Package ID" and "Source ID" in `cargo metadata` are opaque strings.
+ [#12313](https://github.com/rust-lang/cargo/pull/12313)
+- Added `profile.strip` to configuration docs.
+ [#12337](https://github.com/rust-lang/cargo/pull/12337)
+- Multiple versions that differ only in the metadata tag are disallowed on crates.io.
+ [#12335](https://github.com/rust-lang/cargo/pull/12335)
+
+### Internal
+
+- Updated to `criterion` 0.5.1.
+ [#12338](https://github.com/rust-lang/cargo/pull/12338)
+- ci: automatically test new packages by using `--workspace`.
+ [#12342](https://github.com/rust-lang/cargo/pull/12342)
+- ci: automatically update dependencies monthly with Renovate.
+ [#12341](https://github.com/rust-lang/cargo/pull/12341)
+
## Cargo 1.72 (2023-08-24)
-[64fb38c9...HEAD](https://github.com/rust-lang/cargo/compare/64fb38c9...HEAD)
+[64fb38c9...rust-1.72.0](https://github.com/rust-lang/cargo/compare/64fb38c9...rust-1.72.0)
### Added
+- ❗ Enable `-Zdoctest-in-workspace` by default. When running each documentation
+ test, the working directory is set to the root directory of the package the
+ test belongs to.
+ [docs](https://doc.rust-lang.org/nightly/cargo/commands/cargo-test.html#working-directory-of-tests)
+ [#12221](https://github.com/rust-lang/cargo/pull/12221)
+ [#12288](https://github.com/rust-lang/cargo/pull/12288)
- Add support of the "default" keyword to reset previously set `build.jobs`
parallelism back to the default.
[#12222](https://github.com/rust-lang/cargo/pull/12222)
### Changed
+- ❗ Turned feature name validation check into a hard error. The warning was
+ added in Rust 1.49. These extended characters aren't allowed on crates.io, so
+ this should only impact users of other registries, or people who don't publish
+ to a registry.
+ [#12291](https://github.com/rust-lang/cargo/pull/12291)
- Cargo now warns when an edition 2021 package is in a virtual workspace and
`workspace.resolver` is not set. It is recommended to set the resolver
version for workspaces explicitly.
@@ -42,8 +93,29 @@
([eRFC 3424](https://github.com/rust-lang/rfcs/blob/master/text/3424-cargo-script.md))
[docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#script)
[#12245](https://github.com/rust-lang/cargo/pull/12245)
+ [#12255](https://github.com/rust-lang/cargo/pull/12255)
+ [#12258](https://github.com/rust-lang/cargo/pull/12258)
+ [#12262](https://github.com/rust-lang/cargo/pull/12262)
+ [#12268](https://github.com/rust-lang/cargo/pull/12268)
+ [#12269](https://github.com/rust-lang/cargo/pull/12269)
+ [#12281](https://github.com/rust-lang/cargo/pull/12281)
+ [#12282](https://github.com/rust-lang/cargo/pull/12282)
+ [#12283](https://github.com/rust-lang/cargo/pull/12283)
+ [#12284](https://github.com/rust-lang/cargo/pull/12284)
+ [#12287](https://github.com/rust-lang/cargo/pull/12287)
+ [#12289](https://github.com/rust-lang/cargo/pull/12289)
+ [#12303](https://github.com/rust-lang/cargo/pull/12303)
+ [#12305](https://github.com/rust-lang/cargo/pull/12305)
+ [#12308](https://github.com/rust-lang/cargo/pull/12308)
- Automatically inherit workspace lints when running `cargo new`/`cargo init`.
[#12174](https://github.com/rust-lang/cargo/pull/12174)
+- Removed `-Zjobserver-per-rustc` again.
+ [#12285](https://github.com/rust-lang/cargo/pull/12285)
+- Added `.toml` file extension restriction for `-Zconfig-include`.
+ [#12298](https://github.com/rust-lang/cargo/pull/12298)
+- Added `-Znext-lockfile-bump` to prepare for the next lockfile bump.
+ [#12279](https://github.com/rust-lang/cargo/pull/12279)
+ [#12302](https://github.com/rust-lang/cargo/pull/12302)
### Documentation
@@ -55,6 +127,18 @@
[#12192](https://github.com/rust-lang/cargo/pull/12192)
[#12239](https://github.com/rust-lang/cargo/pull/12239)
[#12247](https://github.com/rust-lang/cargo/pull/12247)
+- Added more documentation for `Source` download functions.
+ [#12319](https://github.com/rust-lang/cargo/pull/12319)
+- Added READMEs for the credential helpers.
+ [#12322](https://github.com/rust-lang/cargo/pull/12322)
+- Fixed version requirement example in Dependency Resolution.
+ [#12267](https://github.com/rust-lang/cargo/pull/12267)
+- Clarify the default behavior of cargo-install.
+ [#12276](https://github.com/rust-lang/cargo/pull/12276)
+- Clarify the use of "default" branch instead of `main` by default.
+ [#12251](https://github.com/rust-lang/cargo/pull/12251)
+- Provide guidance on version requirements.
+ [#12323](https://github.com/rust-lang/cargo/pull/12323)
### Internal
@@ -62,9 +146,13 @@
[#12236](https://github.com/rust-lang/cargo/pull/12236)
- Updated to `curl-sys` 0.4.63, which corresponds to curl 8.1.2.
[#12218](https://github.com/rust-lang/cargo/pull/12218)
+- Updated to `openssl` 0.10.55.
+ [#12300](https://github.com/rust-lang/cargo/pull/12300)
+- Updated several dependencies.
+ [#12261](https://github.com/rust-lang/cargo/pull/12261)
- Removed unused features from `windows-sys` dependency.
[#12176](https://github.com/rust-lang/cargo/pull/12176)
-- Refactored compiler invocations
+- Refactored compiler invocations.
[#12211](https://github.com/rust-lang/cargo/pull/12211)
- Refactored git and registry sources, and registry data.
[#12203](https://github.com/rust-lang/cargo/pull/12203)
@@ -83,6 +171,18 @@
[#12199](https://github.com/rust-lang/cargo/pull/12199)
- Migrated print-ban from test to clippy
[#12246](https://github.com/rust-lang/cargo/pull/12246)
+- Switched to `OnceLock` for interning uses.
+ [#12217](https://github.com/rust-lang/cargo/pull/12217)
+- Removed an unnecessary `.clone`.
+ [#12213](https://github.com/rust-lang/cargo/pull/12213)
+- Don't try to compile `cargo-credential-gnome-secret` on non-Linux platforms.
+ [#12321](https://github.com/rust-lang/cargo/pull/12321)
+- Use macro to remove duplication of workspace inheritable fields getters.
+ [#12317](https://github.com/rust-lang/cargo/pull/12317)
+- Extracted and rearranged registry API items to their own modules.
+ [#12290](https://github.com/rust-lang/cargo/pull/12290)
+- Show a better error when container tests fail.
+ [#12264](https://github.com/rust-lang/cargo/pull/12264)
## Cargo 1.71 (2023-07-13)
[84b7041f...rust-1.71.0](https://github.com/rust-lang/cargo/compare/84b7041f...rust-1.71.0)
@@ -94,8 +194,6 @@
[#11958](https://github.com/rust-lang/cargo/pull/11958)
- Added `workspace_default_members` to the output of `cargo metadata`.
[#11978](https://github.com/rust-lang/cargo/pull/11978)
-- `cargo add` now considers `rust-version` when selecting packages.
- [#12078](https://github.com/rust-lang/cargo/pull/12078)
- Automatically inherit workspace fields when running `cargo new`/`cargo init`.
[#12069](https://github.com/rust-lang/cargo/pull/12069)
@@ -173,6 +271,8 @@
[#11981](https://github.com/rust-lang/cargo/pull/11981)
- Added `-Zmsrv-policy` feature flag placeholder.
[#12043](https://github.com/rust-lang/cargo/pull/12043)
+- `cargo add` now considers `rust-version` when selecting packages with `-Zmsrv-policy`.
+ [#12078](https://github.com/rust-lang/cargo/pull/12078)
### Documentation
diff --git a/src/tools/cargo/Cargo.lock b/src/tools/cargo/Cargo.lock
index fe365bbcb..371504138 100644
--- a/src/tools/cargo/Cargo.lock
+++ b/src/tools/cargo/Cargo.lock
@@ -39,6 +39,12 @@ dependencies = [
]
[[package]]
+name = "anes"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
+
+[[package]]
name = "anstream"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -74,7 +80,7 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
dependencies = [
- "windows-sys 0.48.0",
+ "windows-sys",
]
[[package]]
@@ -84,14 +90,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188"
dependencies = [
"anstyle",
- "windows-sys 0.48.0",
+ "windows-sys",
]
[[package]]
name = "anyhow"
-version = "1.0.71"
+version = "1.0.72"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
+checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854"
[[package]]
name = "arc-swap"
@@ -106,17 +112,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b"
[[package]]
-name = "atty"
-version = "0.2.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
-dependencies = [
- "hermit-abi 0.1.19",
- "libc",
- "winapi",
-]
-
-[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -174,9 +169,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
-version = "2.3.2"
+version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6dbe3c979c178231552ecba20214a8272df4e09f232a87aef4320cf06539aded"
+checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42"
[[package]]
name = "bitmaps"
@@ -262,20 +257,23 @@ dependencies = [
[[package]]
name = "cargo"
-version = "0.73.0"
+version = "0.74.0"
dependencies = [
"anyhow",
"base64",
"bytesize",
- "cargo-platform 0.1.3",
+ "cargo-credential",
+ "cargo-credential-libsecret",
+ "cargo-credential-macos-keychain",
+ "cargo-credential-wincred",
+ "cargo-platform 0.1.4",
"cargo-test-macro",
"cargo-test-support",
"cargo-util",
- "clap 4.3.3",
+ "clap",
"crates-io",
"curl",
"curl-sys",
- "env_logger 0.10.0",
"filetime",
"flate2",
"fwdansi",
@@ -288,7 +286,7 @@ dependencies = [
"hmac",
"home 0.5.5",
"http-auth",
- "humantime 2.1.0",
+ "humantime",
"ignore",
"im-rc",
"indexmap",
@@ -297,14 +295,12 @@ dependencies = [
"lazycell",
"libc",
"libgit2-sys",
- "log",
"memchr",
"opener",
"openssl",
"os_info",
"pasetors",
"pathdiff",
- "pretty_env_logger",
"pulldown-cmark",
"rand",
"rustfix",
@@ -318,27 +314,40 @@ dependencies = [
"shell-escape",
"snapbox",
"strip-ansi-escapes",
- "syn 2.0.18",
+ "syn 2.0.28",
"tar",
"tempfile",
"termcolor",
"time",
"toml",
"toml_edit",
+ "tracing",
+ "tracing-subscriber",
+ "unicase",
"unicode-width",
"unicode-xid",
"url",
"walkdir",
- "windows-sys 0.48.0",
+ "windows-sys",
]
[[package]]
name = "cargo-credential"
-version = "0.2.0"
+version = "0.3.0"
+dependencies = [
+ "anyhow",
+ "libc",
+ "serde",
+ "serde_json",
+ "snapbox",
+ "thiserror",
+ "time",
+ "windows-sys",
+]
[[package]]
name = "cargo-credential-1password"
-version = "0.2.0"
+version = "0.3.0"
dependencies = [
"cargo-credential",
"serde",
@@ -346,16 +355,17 @@ dependencies = [
]
[[package]]
-name = "cargo-credential-gnome-secret"
-version = "0.2.0"
+name = "cargo-credential-libsecret"
+version = "0.3.1"
dependencies = [
+ "anyhow",
"cargo-credential",
- "pkg-config",
+ "libloading",
]
[[package]]
name = "cargo-credential-macos-keychain"
-version = "0.2.0"
+version = "0.3.0"
dependencies = [
"cargo-credential",
"security-framework",
@@ -363,10 +373,10 @@ dependencies = [
[[package]]
name = "cargo-credential-wincred"
-version = "0.2.0"
+version = "0.3.0"
dependencies = [
"cargo-credential",
- "windows-sys 0.48.0",
+ "windows-sys",
]
[[package]]
@@ -380,7 +390,7 @@ dependencies = [
[[package]]
name = "cargo-platform"
-version = "0.1.3"
+version = "0.1.4"
dependencies = [
"serde",
]
@@ -412,12 +422,12 @@ dependencies = [
"time",
"toml",
"url",
- "windows-sys 0.48.0",
+ "windows-sys",
]
[[package]]
name = "cargo-util"
-version = "0.2.5"
+version = "0.2.6"
dependencies = [
"anyhow",
"core-foundation",
@@ -425,14 +435,14 @@ dependencies = [
"hex",
"jobserver",
"libc",
- "log",
"miow",
"same-file",
"sha2",
"shell-escape",
"tempfile",
+ "tracing",
"walkdir",
- "windows-sys 0.48.0",
+ "windows-sys",
]
[[package]]
@@ -470,34 +480,49 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
-name = "clap"
-version = "2.34.0"
+name = "ciborium"
+version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
+checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926"
dependencies = [
- "bitflags 1.3.2",
- "textwrap",
- "unicode-width",
+ "ciborium-io",
+ "ciborium-ll",
+ "serde",
+]
+
+[[package]]
+name = "ciborium-io"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656"
+
+[[package]]
+name = "ciborium-ll"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b"
+dependencies = [
+ "ciborium-io",
+ "half",
]
[[package]]
name = "clap"
-version = "4.3.3"
+version = "4.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca8f255e4b8027970e78db75e78831229c9815fdbfa67eb1a1b777a62e24b4a0"
+checksum = "5fd304a20bff958a57f04c4e96a2e7594cc4490a0e809cbd48bb6437edaa452d"
dependencies = [
"clap_builder",
]
[[package]]
name = "clap_builder"
-version = "4.3.3"
+version = "4.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "acd4f3c17c83b0ba34ffbc4f8bbd74f079413f747f84a6f89292f138057e36ab"
+checksum = "01c6a3f08f1fe5662a35cfe393aec09c4df95f60ee93b7556505260f75eee9e1"
dependencies = [
"anstream",
"anstyle",
- "bitflags 1.3.2",
"clap_lex",
"strsim",
"terminal_size",
@@ -563,13 +588,13 @@ dependencies = [
[[package]]
name = "crates-io"
-version = "0.37.0"
+version = "0.38.0"
dependencies = [
- "anyhow",
"curl",
"percent-encoding",
"serde",
"serde_json",
+ "thiserror",
"url",
]
@@ -584,24 +609,24 @@ dependencies = [
[[package]]
name = "criterion"
-version = "0.3.6"
+version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f"
+checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f"
dependencies = [
- "atty",
+ "anes",
"cast",
- "clap 2.34.0",
+ "ciborium",
+ "clap",
"criterion-plot",
- "csv",
+ "is-terminal",
"itertools",
- "lazy_static",
"num-traits",
+ "once_cell",
"oorandom",
"plotters",
"rayon",
"regex",
"serde",
- "serde_cbor",
"serde_derive",
"serde_json",
"tinytemplate",
@@ -610,9 +635,9 @@ dependencies = [
[[package]]
name = "criterion-plot"
-version = "0.4.5"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876"
+checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1"
dependencies = [
"cast",
"itertools",
@@ -684,43 +709,12 @@ dependencies = [
]
[[package]]
-name = "csv"
-version = "1.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "626ae34994d3d8d668f4269922248239db4ae42d538b14c398b74a52208e8086"
-dependencies = [
- "csv-core",
- "itoa 1.0.6",
- "ryu",
- "serde",
-]
-
-[[package]]
-name = "csv-core"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90"
-dependencies = [
- "memchr",
-]
-
-[[package]]
name = "ct-codecs"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3b7eb4404b8195a9abb6356f4ac07d8ba267045c8d6d220ac4dc992e6cc75df"
[[package]]
-name = "ctor"
-version = "0.1.26"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
-dependencies = [
- "quote",
- "syn 1.0.109",
-]
-
-[[package]]
name = "curl"
version = "0.4.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -737,9 +731,9 @@ dependencies = [
[[package]]
name = "curl-sys"
-version = "0.4.63+curl-8.1.2"
+version = "0.4.65+curl-8.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aeb0fef7046022a1e2ad67a004978f0e3cacb9e3123dc62ce768f92197b771dc"
+checksum = "961ba061c9ef2fe34bbd12b807152d96f0badd2bebe7b90ce6c8c8b7572a0986"
dependencies = [
"cc",
"libc",
@@ -837,30 +831,10 @@ dependencies = [
]
[[package]]
-name = "env_logger"
-version = "0.7.1"
+name = "equivalent"
+version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36"
-dependencies = [
- "atty",
- "humantime 1.3.0",
- "log",
- "regex",
- "termcolor",
-]
-
-[[package]]
-name = "env_logger"
-version = "0.10.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0"
-dependencies = [
- "humantime 2.1.0",
- "is-terminal",
- "log",
- "regex",
- "termcolor",
-]
+checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
[[package]]
name = "errno"
@@ -870,7 +844,7 @@ checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a"
dependencies = [
"errno-dragonfly",
"libc",
- "windows-sys 0.48.0",
+ "windows-sys",
]
[[package]]
@@ -884,6 +858,18 @@ dependencies = [
]
[[package]]
+name = "escargot"
+version = "0.5.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "768064bd3a0e2bedcba91dc87ace90beea91acc41b6a01a3ca8e9aa8827461bf"
+dependencies = [
+ "log",
+ "once_cell",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
name = "fastrand"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -893,6 +879,12 @@ dependencies = [
]
[[package]]
+name = "fastrand"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764"
+
+[[package]]
name = "ff"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -917,7 +909,7 @@ dependencies = [
"cfg-if",
"libc",
"redox_syscall 0.2.16",
- "windows-sys 0.48.0",
+ "windows-sys",
]
[[package]]
@@ -1172,7 +1164,7 @@ version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f216df1c33e6e1555923eff0096858a879e8aaadd35b5d788641e4e8064c892"
dependencies = [
- "bitflags 2.3.2",
+ "bitflags 2.3.3",
"bstr",
"gix-path",
"libc",
@@ -1269,7 +1261,7 @@ version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd0ade1e80ab1f079703d1824e1daf73009096386aa7fd2f0477f6e4ac0a558e"
dependencies = [
- "bitflags 2.3.2",
+ "bitflags 2.3.3",
"bstr",
"gix-features",
"gix-path",
@@ -1277,9 +1269,9 @@ dependencies = [
[[package]]
name = "gix-hash"
-version = "0.11.2"
+version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee181c85d3955f54c4426e6bfaeeada4428692e1a39b8788c2ac7785fc301dd8"
+checksum = "a0dd58cdbe7ffa4032fc111864c80d5f8cecd9a2c9736c97ae7e5be834188272"
dependencies = [
"hex",
"thiserror",
@@ -1287,12 +1279,12 @@ dependencies = [
[[package]]
name = "gix-hashtable"
-version = "0.2.1"
+version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd259bd0d96e6153e357a8cdaca76c48e103fd34208b6c0ce77b1ad995834bd2"
+checksum = "9e133bc56d938eaec1c675af7c681a51de9662b0ada779f45607b967a10da77a"
dependencies = [
"gix-hash",
- "hashbrown 0.13.2",
+ "hashbrown 0.14.0",
"parking_lot",
]
@@ -1314,7 +1306,7 @@ version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "616ba958fabfb11263fa042c35690d48a6c7be4e9277e2c7e24ff263b3fe7b82"
dependencies = [
- "bitflags 2.3.2",
+ "bitflags 2.3.3",
"bstr",
"btoi",
"filetime",
@@ -1358,7 +1350,7 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "945c3ef1e912e44a5f405fc9e924edf42000566a1b257ed52cb1293300f6f08c"
dependencies = [
- "bitflags 2.3.2",
+ "bitflags 2.3.3",
"gix-commitgraph",
"gix-hash",
"gix-object",
@@ -1458,7 +1450,7 @@ dependencies = [
"gix-command",
"gix-config-value",
"parking_lot",
- "rustix",
+ "rustix 0.37.20",
"thiserror",
]
@@ -1559,7 +1551,7 @@ version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2b7b38b766eb95dcc5350a9c450030b69892c0902fa35f4a6d0809273bd9dae"
dependencies = [
- "bitflags 2.3.2",
+ "bitflags 2.3.3",
"gix-path",
"libc",
"windows",
@@ -1631,7 +1623,7 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbcfcb150c7ef553d76988467d223254045bdcad0dc6724890f32fbe96415da5"
dependencies = [
- "fastrand",
+ "fastrand 1.9.0",
]
[[package]]
@@ -1724,18 +1716,9 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hashbrown"
-version = "0.13.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e"
-
-[[package]]
-name = "hermit-abi"
-version = "0.1.19"
+version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
-dependencies = [
- "libc",
-]
+checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
[[package]]
name = "hermit-abi"
@@ -1748,9 +1731,9 @@ dependencies = [
[[package]]
name = "hermit-abi"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286"
+checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b"
[[package]]
name = "hex"
@@ -1782,14 +1765,14 @@ version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb"
dependencies = [
- "windows-sys 0.48.0",
+ "windows-sys",
]
[[package]]
name = "home"
-version = "0.5.6"
+version = "0.5.7"
dependencies = [
- "windows-sys 0.48.0",
+ "windows-sys",
]
[[package]]
@@ -1803,15 +1786,6 @@ dependencies = [
[[package]]
name = "humantime"
-version = "1.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f"
-dependencies = [
- "quick-error 1.2.3",
-]
-
-[[package]]
-name = "humantime"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
@@ -1869,12 +1843,12 @@ dependencies = [
[[package]]
name = "indexmap"
-version = "1.9.3"
+version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
+checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d"
dependencies = [
- "autocfg",
- "hashbrown 0.12.3",
+ "equivalent",
+ "hashbrown 0.14.0",
]
[[package]]
@@ -1902,9 +1876,9 @@ version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2"
dependencies = [
- "hermit-abi 0.3.1",
+ "hermit-abi 0.3.2",
"libc",
- "windows-sys 0.48.0",
+ "windows-sys",
]
[[package]]
@@ -1913,10 +1887,10 @@ version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f"
dependencies = [
- "hermit-abi 0.3.1",
+ "hermit-abi 0.3.2",
"io-lifetimes",
- "rustix",
- "windows-sys 0.48.0",
+ "rustix 0.37.20",
+ "windows-sys",
]
[[package]]
@@ -1987,9 +1961,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
[[package]]
name = "libc"
-version = "0.2.146"
+version = "0.2.147"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b"
+checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
[[package]]
name = "libgit2-sys"
@@ -2006,6 +1980,16 @@ dependencies = [
]
[[package]]
+name = "libloading"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d580318f95776505201b28cf98eb1fa5e4be3b689633ba6a3e6cd880ff22d8cb"
+dependencies = [
+ "cfg-if",
+ "windows-sys",
+]
+
+[[package]]
name = "libm"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2054,6 +2038,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
[[package]]
+name = "linux-raw-sys"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503"
+
+[[package]]
name = "lock_api"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2070,6 +2060,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
+name = "matchers"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata",
+]
+
+[[package]]
name = "maybe-async"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2134,11 +2133,11 @@ dependencies = [
[[package]]
name = "miow"
-version = "0.5.0"
+version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52ffbca2f655e33c08be35d87278e5b18b89550a37dbd598c20db92f6a471123"
+checksum = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044"
dependencies = [
- "windows-sys 0.42.0",
+ "windows-sys",
]
[[package]]
@@ -2158,6 +2157,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
[[package]]
+name = "normpath"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec60c60a693226186f5d6edf073232bfb6464ed97eb22cf3b01c1e8198fd97f5"
+dependencies = [
+ "windows-sys",
+]
+
+[[package]]
+name = "nu-ansi-term"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+dependencies = [
+ "overload",
+ "winapi",
+]
+
+[[package]]
name = "num-traits"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2200,11 +2218,12 @@ checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
[[package]]
name = "opener"
-version = "0.5.2"
+version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "293c15678e37254c15bd2f092314abb4e51d7fdde05c2021279c12631b54f005"
+checksum = "6c62dcb6174f9cb326eac248f07e955d5d559c272730b6c03e396b443b562788"
dependencies = [
"bstr",
+ "normpath",
"winapi",
]
@@ -2231,7 +2250,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.18",
+ "syn 2.0.28",
]
[[package]]
@@ -2294,13 +2313,10 @@ dependencies = [
]
[[package]]
-name = "output_vt100"
-version = "0.1.3"
+name = "overload"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "628223faebab4e3e40667ee0b2336d34a5b960ff60ea743ddfdbcf7770bcfb66"
-dependencies = [
- "winapi",
-]
+checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "p384"
@@ -2429,7 +2445,7 @@ dependencies = [
"pest_meta",
"proc-macro2",
"quote",
- "syn 2.0.18",
+ "syn 2.0.28",
]
[[package]]
@@ -2444,6 +2460,12 @@ dependencies = [
]
[[package]]
+name = "pin-project-lite"
+version = "0.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57"
+
+[[package]]
name = "pkcs8"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2495,27 +2517,15 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "pretty_assertions"
-version = "1.3.0"
+version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755"
+checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66"
dependencies = [
- "ctor",
"diff",
- "output_vt100",
"yansi",
]
[[package]]
-name = "pretty_env_logger"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "926d36b9553851b8b0005f1275891b392ee4d2d833852c417ed025477350fb9d"
-dependencies = [
- "env_logger 0.7.1",
- "log",
-]
-
-[[package]]
name = "primeorder"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2526,9 +2536,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.60"
+version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
+checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
dependencies = [
"unicode-ident",
]
@@ -2587,9 +2597,9 @@ checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
[[package]]
name = "quote"
-version = "1.0.28"
+version = "1.0.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
+checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965"
dependencies = [
"proc-macro2",
]
@@ -2698,6 +2708,9 @@ name = "regex-automata"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax 0.6.29",
+]
[[package]]
name = "regex-syntax"
@@ -2760,8 +2773,21 @@ dependencies = [
"errno",
"io-lifetimes",
"libc",
- "linux-raw-sys",
- "windows-sys 0.48.0",
+ "linux-raw-sys 0.3.8",
+ "windows-sys",
+]
+
+[[package]]
+name = "rustix"
+version = "0.38.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ee020b1716f0a80e2ace9b03441a749e402e86712f15f16fe8a8f75afac732f"
+dependencies = [
+ "bitflags 2.3.3",
+ "errno",
+ "libc",
+ "linux-raw-sys 0.4.5",
+ "windows-sys",
]
[[package]]
@@ -2793,11 +2819,11 @@ dependencies = [
[[package]]
name = "schannel"
-version = "0.1.21"
+version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3"
+checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88"
dependencies = [
- "windows-sys 0.42.0",
+ "windows-sys",
]
[[package]]
@@ -2822,9 +2848,9 @@ dependencies = [
[[package]]
name = "security-framework"
-version = "2.9.1"
+version = "2.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fc758eb7bffce5b308734e9b0c1468893cae9ff70ebf13e7090be8dcbcc83a8"
+checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de"
dependencies = [
"bitflags 1.3.2",
"core-foundation",
@@ -2845,9 +2871,9 @@ dependencies = [
[[package]]
name = "semver"
-version = "1.0.17"
+version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
+checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918"
dependencies = [
"serde",
]
@@ -2861,9 +2887,9 @@ dependencies = [
[[package]]
name = "serde"
-version = "1.0.164"
+version = "1.0.171"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d"
+checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9"
dependencies = [
"serde_derive",
]
@@ -2879,40 +2905,30 @@ dependencies = [
]
[[package]]
-name = "serde_cbor"
-version = "0.11.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5"
-dependencies = [
- "half",
- "serde",
-]
-
-[[package]]
name = "serde_derive"
-version = "1.0.164"
+version = "1.0.171"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68"
+checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.18",
+ "syn 2.0.28",
]
[[package]]
name = "serde_ignored"
-version = "0.1.7"
+version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "94eb4a4087ba8bdf14a9208ac44fddbf55c01a6195f7edfc511ddaff6cae45a6"
+checksum = "80c31d5c53fd39f208e770f5a20a0bb214dee2a8d0d8adba18e19ad95a482ca5"
dependencies = [
"serde",
]
[[package]]
name = "serde_json"
-version = "1.0.96"
+version = "1.0.104"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
+checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c"
dependencies = [
"itoa 1.0.6",
"ryu",
@@ -2921,9 +2937,9 @@ dependencies = [
[[package]]
name = "serde_spanned"
-version = "0.6.2"
+version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93107647184f6027e3b7dcb2e11034cf95ffa1e3a682c67951963ac69c1c007d"
+checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186"
dependencies = [
"serde",
]
@@ -2947,9 +2963,9 @@ checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012"
[[package]]
name = "sha2"
-version = "0.10.6"
+version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0"
+checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8"
dependencies = [
"cfg-if",
"cpufeatures",
@@ -2957,6 +2973,15 @@ dependencies = [
]
[[package]]
+name = "sharded-slab"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
+dependencies = [
+ "lazy_static",
+]
+
+[[package]]
name = "shell-escape"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3023,6 +3048,7 @@ dependencies = [
"anstyle",
"content_inspector",
"dunce",
+ "escargot",
"filetime",
"normalize-line-endings",
"similar",
@@ -3100,9 +3126,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.18"
+version = "2.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
+checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567"
dependencies = [
"proc-macro2",
"quote",
@@ -3133,16 +3159,15 @@ dependencies = [
[[package]]
name = "tempfile"
-version = "3.6.0"
+version = "3.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6"
+checksum = "5486094ee78b2e5038a6382ed7645bc084dc2ec433426ca4c3cb61e2007b8998"
dependencies = [
- "autocfg",
"cfg-if",
- "fastrand",
+ "fastrand 2.0.0",
"redox_syscall 0.3.5",
- "rustix",
- "windows-sys 0.48.0",
+ "rustix 0.38.6",
+ "windows-sys",
]
[[package]]
@@ -3160,37 +3185,28 @@ version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e6bf6f19e9f8ed8d4048dc22981458ebcf406d67e94cd422e5ecd73d63b3237"
dependencies = [
- "rustix",
- "windows-sys 0.48.0",
-]
-
-[[package]]
-name = "textwrap"
-version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
-dependencies = [
- "unicode-width",
+ "rustix 0.37.20",
+ "windows-sys",
]
[[package]]
name = "thiserror"
-version = "1.0.40"
+version = "1.0.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac"
+checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
-version = "1.0.40"
+version = "1.0.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
+checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.18",
+ "syn 2.0.28",
]
[[package]]
@@ -3259,9 +3275,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "toml"
-version = "0.7.4"
+version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d6135d499e69981f9ff0ef2167955a5333c35e36f6937d382974566b3d5b94ec"
+checksum = "c17e963a819c331dcacd7ab957d80bc2b9a9c1e71c804826d2f283dd65306542"
dependencies = [
"serde",
"serde_spanned",
@@ -3271,18 +3287,18 @@ dependencies = [
[[package]]
name = "toml_datetime"
-version = "0.6.2"
+version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a76a9312f5ba4c2dec6b9161fdf25d87ad8a09256ccea5a556fef03c706a10f"
+checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
-version = "0.19.10"
+version = "0.19.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2380d56e8670370eee6566b0bfd4265f65b3f432e8c6d85623f728d4fa31f739"
+checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a"
dependencies = [
"indexmap",
"serde",
@@ -3292,6 +3308,68 @@ dependencies = [
]
[[package]]
+name = "tracing"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
+dependencies = [
+ "cfg-if",
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.28",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
+dependencies = [
+ "once_cell",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
+dependencies = [
+ "lazy_static",
+ "log",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77"
+dependencies = [
+ "matchers",
+ "nu-ansi-term",
+ "once_cell",
+ "regex",
+ "sharded-slab",
+ "smallvec",
+ "thread_local",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+]
+
+[[package]]
name = "typenum"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3375,6 +3453,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
+name = "valuable"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+
+[[package]]
name = "varisat"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3540,7 +3624,7 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
- "syn 2.0.18",
+ "syn 2.0.28",
"wasm-bindgen-shared",
]
@@ -3562,7 +3646,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.18",
+ "syn 2.0.28",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@@ -3625,21 +3709,6 @@ dependencies = [
[[package]]
name = "windows-sys"
-version = "0.42.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
-dependencies = [
- "windows_aarch64_gnullvm 0.42.2",
- "windows_aarch64_msvc 0.42.2",
- "windows_i686_gnu 0.42.2",
- "windows_i686_msvc 0.42.2",
- "windows_x86_64_gnu 0.42.2",
- "windows_x86_64_gnullvm 0.42.2",
- "windows_x86_64_msvc 0.42.2",
-]
-
-[[package]]
-name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
@@ -3653,104 +3722,62 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5"
dependencies = [
- "windows_aarch64_gnullvm 0.48.0",
- "windows_aarch64_msvc 0.48.0",
- "windows_i686_gnu 0.48.0",
- "windows_i686_msvc 0.48.0",
- "windows_x86_64_gnu 0.48.0",
- "windows_x86_64_gnullvm 0.48.0",
- "windows_x86_64_msvc 0.48.0",
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
-version = "0.42.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8"
-
-[[package]]
-name = "windows_aarch64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
[[package]]
name = "windows_aarch64_msvc"
-version = "0.42.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43"
-
-[[package]]
-name = "windows_aarch64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
[[package]]
name = "windows_i686_gnu"
-version = "0.42.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f"
-
-[[package]]
-name = "windows_i686_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
[[package]]
name = "windows_i686_msvc"
-version = "0.42.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060"
-
-[[package]]
-name = "windows_i686_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
[[package]]
name = "windows_x86_64_gnu"
-version = "0.42.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36"
-
-[[package]]
-name = "windows_x86_64_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
[[package]]
name = "windows_x86_64_gnullvm"
-version = "0.42.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3"
-
-[[package]]
-name = "windows_x86_64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
[[package]]
name = "windows_x86_64_msvc"
-version = "0.42.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0"
-
-[[package]]
-name = "windows_x86_64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
[[package]]
name = "winnow"
-version = "0.4.6"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61de7bac303dc551fe038e2b3cef0f571087a47571ea6e79a87692ac99b99699"
+checksum = "81fac9742fd1ad1bd9643b991319f72dd031016d44b77039a26977eb667141e7"
dependencies = [
"memchr",
]
@@ -3760,21 +3787,23 @@ name = "xtask-build-man"
version = "0.0.0"
[[package]]
-name = "xtask-stale-label"
+name = "xtask-bump-check"
version = "0.0.0"
dependencies = [
- "toml_edit",
+ "anyhow",
+ "cargo",
+ "cargo-util",
+ "clap",
+ "git2",
+ "tracing",
+ "tracing-subscriber",
]
[[package]]
-name = "xtask-unpublished"
+name = "xtask-stale-label"
version = "0.0.0"
dependencies = [
- "anyhow",
- "cargo",
- "clap 4.3.3",
- "env_logger 0.10.0",
- "log",
+ "toml_edit",
]
[[package]]
diff --git a/src/tools/cargo/Cargo.toml b/src/tools/cargo/Cargo.toml
index 7e383be69..0e189a6d4 100644
--- a/src/tools/cargo/Cargo.toml
+++ b/src/tools/cargo/Cargo.toml
@@ -10,98 +10,106 @@ exclude = [
"target/", # exclude bench testing
]
+[workspace.package]
+edition = "2021"
+license = "MIT OR Apache-2.0"
+
[workspace.dependencies]
-anyhow = "1.0.47"
-base64 = "0.21.0"
-bytesize = "1.0"
+anyhow = "1.0.72"
+base64 = "0.21.2"
+bytesize = "1.2"
cargo = { path = "" }
-cargo-credential = { version = "0.2.0", path = "credential/cargo-credential" }
-cargo-platform = { path = "crates/cargo-platform", version = "0.1.3" }
+cargo-credential = { version = "0.3.0", path = "credential/cargo-credential" }
+cargo-credential-libsecret = { version = "0.3.1", path = "credential/cargo-credential-libsecret" }
+cargo-credential-wincred = { version = "0.3.0", path = "credential/cargo-credential-wincred" }
+cargo-credential-macos-keychain = { version = "0.3.0", path = "credential/cargo-credential-macos-keychain" }
+cargo-platform = { path = "crates/cargo-platform", version = "0.1.4" }
cargo-test-macro = { path = "crates/cargo-test-macro" }
cargo-test-support = { path = "crates/cargo-test-support" }
-cargo-util = { version = "0.2.5", path = "crates/cargo-util" }
+cargo-util = { version = "0.2.6", path = "crates/cargo-util" }
cargo_metadata = "0.14.0"
-clap = "4.2.0"
-core-foundation = { version = "0.9.0", features = ["mac_os_10_7_support"] }
-crates-io = { version = "0.37.0", path = "crates/crates-io" }
-criterion = { version = "0.3.5", features = ["html_reports"] }
+clap = "4.3.19"
+core-foundation = { version = "0.9.3", features = ["mac_os_10_7_support"] }
+crates-io = { version = "0.38.0", path = "crates/crates-io" }
+criterion = { version = "0.5.1", features = ["html_reports"] }
curl = "0.4.44"
-curl-sys = "0.4.63"
-env_logger = "0.10.0"
-filetime = "0.2.9"
-flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] }
+curl-sys = "0.4.65"
+filetime = "0.2.21"
+flate2 = { version = "1.0.26", default-features = false, features = ["zlib"] }
fwdansi = "1.1.0"
-git2 = "0.17.1"
+git2 = "0.17.2"
git2-curl = "0.18.0"
gix = { version = "0.45.1", default-features = false, features = ["blocking-http-transport-curl", "progress-tree"] }
gix-features-for-configuration-only = { version = "0.30.0", package = "gix-features", features = [ "parallel" ] }
-glob = "0.3.0"
-handlebars = { version = "3.2.1", features = ["dir_source"] }
-hex = "0.4.2"
+glob = "0.3.1"
+handlebars = { version = "3.5.5", features = ["dir_source"] }
+hex = "0.4.3"
hmac = "0.12.1"
home = "0.5.5"
-http-auth = { version = "0.1.6", default-features = false }
-humantime = "2.0.0"
-ignore = "0.4.7"
-im-rc = "15.0.0"
-indexmap = "1"
+http-auth = { version = "0.1.8", default-features = false }
+humantime = "2.1.0"
+ignore = "0.4.20"
+im-rc = "15.1.0"
+indexmap = "2"
itertools = "0.10.0"
jobserver = "0.1.26"
-lazy_static = "1.3.0"
-lazycell = "1.2.0"
-libc = "0.2.144"
-libgit2-sys = "0.15.1"
-log = "0.4.17"
-memchr = "2.1.3"
-miow = "0.5.0"
-opener = "0.5"
+lazy_static = "1.4.0"
+lazycell = "1.3.0"
+libc = "0.2.147"
+libgit2-sys = "0.15.2"
+libloading = "0.8.0"
+memchr = "2.5.0"
+miow = "0.6.0"
+opener = "0.6.1"
openssl = "0.10.55"
-os_info = "3.5.0"
-pasetors = { version = "0.6.4", features = ["v3", "paserk", "std", "serde"] }
+os_info = "3.7.0"
+pasetors = { version = "0.6.7", features = ["v3", "paserk", "std", "serde"] }
pathdiff = "0.2"
-percent-encoding = "2.0"
-pkg-config = "0.3.19"
-pretty_assertions = "1.3.0"
-pretty_env_logger = "0.4"
-proptest = "1.1.0"
-pulldown-cmark = { version = "0.9.2", default-features = false }
+percent-encoding = "2.3"
+pkg-config = "0.3.27"
+pretty_assertions = "1.4.0"
+proptest = "1.2.0"
+pulldown-cmark = { version = "0.9.3", default-features = false }
rand = "0.8.5"
-rustfix = "0.6.0"
+rustfix = "0.6.1"
same-file = "1.0.6"
-security-framework = "2.0.0"
-semver = { version = "1.0.3", features = ["serde"] }
-serde = "1.0.123"
+security-framework = "2.9.2"
+semver = { version = "1.0.18", features = ["serde"] }
+serde = "1.0.171"
serde-value = "0.7.0"
-serde_ignored = "0.1.0"
-serde_json = "1.0.59"
+serde_ignored = "0.1.9"
+serde_json = "1.0.104"
sha1 = "0.10.5"
-sha2 = "0.10.6"
-shell-escape = "0.1.4"
-snapbox = { version = "0.4.0", features = ["diff", "path"] }
-strip-ansi-escapes = "0.1.0"
-syn = { version = "2.0.14", features = ["extra-traits", "full"] }
+sha2 = "0.10.7"
+shell-escape = "0.1.5"
+snapbox = { version = "0.4.11", features = ["diff", "path"] }
+strip-ansi-escapes = "0.1.1"
+syn = { version = "2.0.28", features = ["extra-traits", "full"] }
tar = { version = "0.4.39", default-features = false }
-tempfile = "3.1.0"
-termcolor = "1.1.2"
-time = { version = "0.3", features = ["parsing", "formatting"] }
-toml = "0.7.0"
-toml_edit = "0.19.0"
-unicode-width = "0.1.5"
-unicode-xid = "0.2.0"
-url = "2.2.2"
-varisat = "0.2.1"
-walkdir = "2.3.1"
+tempfile = "3.7.0"
+termcolor = "1.2.0"
+thiserror = "1.0.44"
+time = { version = "0.3", features = ["parsing", "formatting", "serde"] }
+toml = "0.7.6"
+toml_edit = "0.19.14"
+tracing = "0.1.37"
+tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
+unicase = "2.6.0"
+unicode-width = "0.1.10"
+unicode-xid = "0.2.4"
+url = "2.4.0"
+varisat = "0.2.2"
+walkdir = "2.3.3"
windows-sys = "0.48"
[package]
name = "cargo"
-version = "0.73.0"
-edition = "2021"
-license = "MIT OR Apache-2.0"
+version = "0.74.0"
+edition.workspace = true
+license.workspace = true
homepage = "https://crates.io"
repository = "https://github.com/rust-lang/cargo"
documentation = "https://docs.rs/cargo"
-readme = "README.md"
description = """
Cargo, a package manager for Rust.
"""
@@ -115,12 +123,15 @@ anyhow.workspace = true
base64.workspace = true
bytesize.workspace = true
cargo-platform.workspace = true
+cargo-credential.workspace = true
+cargo-credential-libsecret.workspace = true
+cargo-credential-macos-keychain.workspace = true
+cargo-credential-wincred.workspace = true
cargo-util.workspace = true
clap = { workspace = true, features = ["wrap_help"] }
crates-io.workspace = true
curl = { workspace = true, features = ["http2"] }
curl-sys.workspace = true
-env_logger.workspace = true
filetime.workspace = true
flate2.workspace = true
git2.workspace = true
@@ -141,13 +152,11 @@ jobserver.workspace = true
lazycell.workspace = true
libc.workspace = true
libgit2-sys.workspace = true
-log.workspace = true
memchr.workspace = true
opener.workspace = true
os_info.workspace = true
pasetors.workspace = true
pathdiff.workspace = true
-pretty_env_logger = { workspace = true, optional = true }
pulldown-cmark.workspace = true
rand.workspace = true
rustfix.workspace = true
@@ -166,6 +175,9 @@ termcolor.workspace = true
time.workspace = true
toml.workspace = true
toml_edit.workspace = true
+tracing.workspace = true
+tracing-subscriber.workspace = true
+unicase.workspace = true
unicode-width.workspace = true
unicode-xid.workspace = true
url.workspace = true
@@ -181,10 +193,12 @@ fwdansi.workspace = true
workspace = true
features = [
"Win32_Foundation",
+ "Win32_Security",
"Win32_Storage_FileSystem",
+ "Win32_System_IO",
"Win32_System_Console",
- "Win32_System_Threading",
"Win32_System_JobObjects",
+ "Win32_System_Threading",
]
[dev-dependencies]
@@ -205,6 +219,5 @@ doc = false
[features]
vendored-openssl = ["openssl/vendored"]
vendored-libgit2 = ["libgit2-sys/vendored"]
-pretty-env-logger = ["pretty_env_logger"]
# This is primarily used by rust-lang/rust distributing cargo the executable.
all-static = ['vendored-openssl', 'curl/static-curl', 'curl/force-system-lib-on-osx']
diff --git a/src/tools/cargo/benches/benchsuite/Cargo.toml b/src/tools/cargo/benches/benchsuite/Cargo.toml
index 782e6c101..c15798787 100644
--- a/src/tools/cargo/benches/benchsuite/Cargo.toml
+++ b/src/tools/cargo/benches/benchsuite/Cargo.toml
@@ -1,8 +1,8 @@
[package]
name = "benchsuite"
version = "0.0.0"
-edition = "2021"
-license = "MIT OR Apache-2.0"
+edition.workspace = true
+license.workspace = true
homepage = "https://github.com/rust-lang/cargo"
repository = "https://github.com/rust-lang/cargo"
description = "Benchmarking suite for Cargo."
diff --git a/src/tools/cargo/benches/capture/Cargo.toml b/src/tools/cargo/benches/capture/Cargo.toml
index 6319a0130..e42fe70e2 100644
--- a/src/tools/cargo/benches/capture/Cargo.toml
+++ b/src/tools/cargo/benches/capture/Cargo.toml
@@ -1,8 +1,8 @@
[package]
name = "capture"
version = "0.1.0"
-edition = "2021"
-license = "MIT OR Apache-2.0"
+edition.workspace = true
+license.workspace = true
description = "Tool for capturing a real-world workspace for benchmarking."
publish = false
diff --git a/src/tools/cargo/ci/validate-version-bump.sh b/src/tools/cargo/ci/validate-version-bump.sh
index 9b54fdaaf..659b54c0e 100755
--- a/src/tools/cargo/ci/validate-version-bump.sh
+++ b/src/tools/cargo/ci/validate-version-bump.sh
@@ -16,46 +16,7 @@ set -euo pipefail
base_sha=$(git rev-parse "${BASE_SHA:-HEAD~1}")
head_sha=$(git rev-parse "${HEAD_SHA:-HEAD}")
-echo "Base branch is $base_sha"
-echo "Current head is $head_sha"
+echo "Base revision is $base_sha"
+echo "Head revision is $head_sha"
-# Gets crate names of members that has been changed from $bash_sha to $head_sha.
-changed_crates=$(
- git diff --name-only "$base_sha" "$head_sha" -- crates/ credential/ benches/ \
- | cut -d'/' -f2 \
- | sort -u
-)
-
-if [ -z "$changed_crates" ]
-then
- echo "No file changed in member crates."
- exit 0
-fi
-
-# Checks publish status for only crates with code changes.
-publish_status_table=$(
- echo "$changed_crates" \
- | xargs printf -- '--package %s\n' \
- | xargs cargo unpublished
-)
-
-# "yes" -> code changed but no version difference -> need a bump
-# Prints 2nd column (sep by space), which is the name of the crate.
-crates_need_bump=$(
- echo "$publish_status_table" \
- | { grep '| yes ' || true; } \
- | awk '{print $2}'
-)
-
-if [ -z "$crates_need_bump" ]
-then
- echo "No version bump needed for member crates."
- exit 0
-fi
-
-echo "Detected changes in these crates but no version bump found:"
-echo "$crates_need_bump"
-echo
-echo "Please bump at least one patch version for each corresponding Cargo.toml:"
-echo 'Run "cargo unpublished" to read the publish status table for details.'
-exit 1
+cargo bump-check --base-rev "$base_sha" --head-rev "$head_sha"
diff --git a/src/tools/cargo/crates/cargo-platform/Cargo.toml b/src/tools/cargo/crates/cargo-platform/Cargo.toml
index 423cf491d..e7f22cf87 100644
--- a/src/tools/cargo/crates/cargo-platform/Cargo.toml
+++ b/src/tools/cargo/crates/cargo-platform/Cargo.toml
@@ -1,8 +1,8 @@
[package]
name = "cargo-platform"
-version = "0.1.3"
-edition = "2021"
-license = "MIT OR Apache-2.0"
+version = "0.1.4"
+edition.workspace = true
+license.workspace = true
homepage = "https://github.com/rust-lang/cargo"
repository = "https://github.com/rust-lang/cargo"
documentation = "https://docs.rs/cargo-platform"
diff --git a/src/tools/cargo/crates/cargo-test-macro/Cargo.toml b/src/tools/cargo/crates/cargo-test-macro/Cargo.toml
index e40602ae3..b5da0522f 100644
--- a/src/tools/cargo/crates/cargo-test-macro/Cargo.toml
+++ b/src/tools/cargo/crates/cargo-test-macro/Cargo.toml
@@ -1,8 +1,8 @@
[package]
name = "cargo-test-macro"
version = "0.1.0"
-edition = "2021"
-license = "MIT OR Apache-2.0"
+edition.workspace = true
+license.workspace = true
homepage = "https://github.com/rust-lang/cargo"
repository = "https://github.com/rust-lang/cargo"
documentation = "https://github.com/rust-lang/cargo"
diff --git a/src/tools/cargo/crates/cargo-test-support/Cargo.toml b/src/tools/cargo/crates/cargo-test-support/Cargo.toml
index 305c809a8..085041aff 100644
--- a/src/tools/cargo/crates/cargo-test-support/Cargo.toml
+++ b/src/tools/cargo/crates/cargo-test-support/Cargo.toml
@@ -1,8 +1,8 @@
[package]
name = "cargo-test-support"
version = "0.1.0"
-license = "MIT OR Apache-2.0"
-edition = "2021"
+license.workspace = true
+edition.workspace = true
publish = false
[lib]
diff --git a/src/tools/cargo/crates/cargo-test-support/containers/sshd/Dockerfile b/src/tools/cargo/crates/cargo-test-support/containers/sshd/Dockerfile
index b52eefbad..f25212770 100644
--- a/src/tools/cargo/crates/cargo-test-support/containers/sshd/Dockerfile
+++ b/src/tools/cargo/crates/cargo-test-support/containers/sshd/Dockerfile
@@ -1,4 +1,4 @@
-FROM alpine:3.17
+FROM alpine:3.18
RUN apk add --no-cache openssh git
RUN ssh-keygen -A
diff --git a/src/tools/cargo/crates/cargo-test-support/src/compare.rs b/src/tools/cargo/crates/cargo-test-support/src/compare.rs
index 96ce52afc..21eb64d28 100644
--- a/src/tools/cargo/crates/cargo-test-support/src/compare.rs
+++ b/src/tools/cargo/crates/cargo-test-support/src/compare.rs
@@ -192,6 +192,7 @@ fn substitute_macros(input: &str) -> String {
("[CHECKING]", " Checking"),
("[COMPLETED]", " Completed"),
("[CREATED]", " Created"),
+ ("[CREDENTIAL]", " Credential"),
("[DOWNGRADING]", " Downgrading"),
("[FINISHED]", " Finished"),
("[ERROR]", "error:"),
diff --git a/src/tools/cargo/crates/cargo-test-support/src/registry.rs b/src/tools/cargo/crates/cargo-test-support/src/registry.rs
index 910f95bfa..27c319656 100644
--- a/src/tools/cargo/crates/cargo-test-support/src/registry.rs
+++ b/src/tools/cargo/crates/cargo-test-support/src/registry.rs
@@ -104,6 +104,8 @@ pub struct RegistryBuilder {
not_found_handler: RequestCallback,
/// If nonzero, the git index update to be delayed by the given number of seconds.
delayed_index_update: usize,
+ /// Credential provider to set in the registry configuration.
+ credential_provider: Option<String>,
}
pub struct TestRegistry {
@@ -172,6 +174,7 @@ impl RegistryBuilder {
custom_responders: HashMap::new(),
not_found_handler: Box::new(not_found),
delayed_index_update: 0,
+ credential_provider: None,
}
}
@@ -266,6 +269,13 @@ impl RegistryBuilder {
self
}
+ /// The credential provider to configure for this registry.
+ #[must_use]
+ pub fn credential_provider(mut self, provider: &[&str]) -> Self {
+ self.credential_provider = Some(format!("['{}']", provider.join("','")));
+ self
+ }
+
/// Initializes the registry.
#[must_use]
pub fn build(self) -> TestRegistry {
@@ -336,6 +346,18 @@ impl RegistryBuilder {
.as_bytes(),
)
.unwrap();
+ if let Some(p) = &self.credential_provider {
+ append(
+ &config_path,
+ &format!(
+ "
+ credential-provider = {p}
+ "
+ )
+ .as_bytes(),
+ )
+ .unwrap()
+ }
} else {
append(
&config_path,
@@ -351,6 +373,20 @@ impl RegistryBuilder {
.as_bytes(),
)
.unwrap();
+
+ if let Some(p) = &self.credential_provider {
+ append(
+ &config_path,
+ &format!(
+ "
+ [registry]
+ credential-provider = {p}
+ "
+ )
+ .as_bytes(),
+ )
+ .unwrap()
+ }
}
}
diff --git a/src/tools/cargo/crates/cargo-util/Cargo.toml b/src/tools/cargo/crates/cargo-util/Cargo.toml
index 614581037..99a59422d 100644
--- a/src/tools/cargo/crates/cargo-util/Cargo.toml
+++ b/src/tools/cargo/crates/cargo-util/Cargo.toml
@@ -1,8 +1,8 @@
[package]
name = "cargo-util"
-version = "0.2.5"
-edition = "2021"
-license = "MIT OR Apache-2.0"
+version = "0.2.6"
+edition.workspace = true
+license.workspace = true
homepage = "https://github.com/rust-lang/cargo"
repository = "https://github.com/rust-lang/cargo"
description = "Miscellaneous support code used by Cargo."
@@ -14,10 +14,10 @@ filetime.workspace = true
hex.workspace = true
jobserver.workspace = true
libc.workspace = true
-log.workspace = true
same-file.workspace = true
shell-escape.workspace = true
tempfile.workspace = true
+tracing.workspace = true
walkdir.workspace = true
[target.'cfg(target_os = "macos")'.dependencies]
diff --git a/src/tools/cargo/crates/cargo-util/src/paths.rs b/src/tools/cargo/crates/cargo-util/src/paths.rs
index 4a917821b..ce6755859 100644
--- a/src/tools/cargo/crates/cargo-util/src/paths.rs
+++ b/src/tools/cargo/crates/cargo-util/src/paths.rs
@@ -237,7 +237,7 @@ pub fn mtime_recursive(path: &Path) -> Result<FileTime> {
Err(e) => {
// Ignore errors while walking. If Cargo can't access it, the
// build script probably can't access it, either.
- log::debug!("failed to determine mtime while walking directory: {}", e);
+ tracing::debug!("failed to determine mtime while walking directory: {}", e);
None
}
})
@@ -252,7 +252,7 @@ pub fn mtime_recursive(path: &Path) -> Result<FileTime> {
// I'm not sure when this is really possible (maybe a
// race with unlinking?). Regardless, if Cargo can't
// read it, the build script probably can't either.
- log::debug!(
+ tracing::debug!(
"failed to determine mtime while fetching symlink metadata of {}: {}",
e.path().display(),
err
@@ -271,7 +271,7 @@ pub fn mtime_recursive(path: &Path) -> Result<FileTime> {
// Can't access the symlink target. If Cargo can't
// access it, the build script probably can't access
// it either.
- log::debug!(
+ tracing::debug!(
"failed to determine mtime of symlink target for {}: {}",
e.path().display(),
err
@@ -286,7 +286,7 @@ pub fn mtime_recursive(path: &Path) -> Result<FileTime> {
// I'm not sure when this is really possible (maybe a
// race with unlinking?). Regardless, if Cargo can't
// read it, the build script probably can't either.
- log::debug!(
+ tracing::debug!(
"failed to determine mtime while fetching metadata of {}: {}",
e.path().display(),
err
@@ -314,7 +314,7 @@ pub fn set_invocation_time(path: &Path) -> Result<FileTime> {
"This file has an mtime of when this was started.",
)?;
let ft = mtime(&timestamp)?;
- log::debug!("invocation time for {:?} is {}", path, ft);
+ tracing::debug!("invocation time for {:?} is {}", path, ft);
Ok(ft)
}
@@ -508,7 +508,7 @@ pub fn link_or_copy(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()>
}
fn _link_or_copy(src: &Path, dst: &Path) -> Result<()> {
- log::debug!("linking {} to {}", src.display(), dst.display());
+ tracing::debug!("linking {} to {}", src.display(), dst.display());
if same_file::is_same_file(src, dst).unwrap_or(false) {
return Ok(());
}
@@ -567,7 +567,7 @@ fn _link_or_copy(src: &Path, dst: &Path) -> Result<()> {
};
link_result
.or_else(|err| {
- log::debug!("link failed {}. falling back to fs::copy", err);
+ tracing::debug!("link failed {}. falling back to fs::copy", err);
fs::copy(src, dst).map(|_| ())
})
.with_context(|| {
@@ -598,8 +598,8 @@ pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<u64> {
pub fn set_file_time_no_err<P: AsRef<Path>>(path: P, time: FileTime) {
let path = path.as_ref();
match filetime::set_file_times(path, time, time) {
- Ok(()) => log::debug!("set file mtime {} to {}", path.display(), time),
- Err(e) => log::warn!(
+ Ok(()) => tracing::debug!("set file mtime {} to {}", path.display(), time),
+ Err(e) => tracing::warn!(
"could not set mtime of {} to {}: {:?}",
path.display(),
time,
@@ -621,7 +621,7 @@ pub fn strip_prefix_canonical<P: AsRef<Path>>(
let safe_canonicalize = |path: &Path| match path.canonicalize() {
Ok(p) => p,
Err(e) => {
- log::warn!("cannot canonicalize {:?}: {:?}", path, e);
+ tracing::warn!("cannot canonicalize {:?}: {:?}", path, e);
path.to_path_buf()
}
};
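
The `log` → `tracing` switch above is mechanical: `tracing`'s macros accept the same format-string arguments, so only the macro path changes. A minimal sketch (not from the diff; it assumes the `tracing` crate and an installed subscriber):

```rust
use std::path::Path;

// Sketch only: `tracing::debug!`/`warn!` accept the same format arguments as
// the old `log` macros, plus optional structured fields such as `?src`.
fn log_link(src: &Path, dst: &Path) {
    tracing::debug!("linking {} to {}", src.display(), dst.display());
    tracing::warn!(?src, ?dst, "link failed, falling back to fs::copy");
}
```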
diff --git a/src/tools/cargo/crates/cargo-util/src/process_builder.rs b/src/tools/cargo/crates/cargo-util/src/process_builder.rs
index 76392f256..b197b95b1 100644
--- a/src/tools/cargo/crates/cargo-util/src/process_builder.rs
+++ b/src/tools/cargo/crates/cargo-util/src/process_builder.rs
@@ -449,7 +449,7 @@ impl ProcessBuilder {
arg.push(tmp.path());
let mut cmd = self.build_command_without_args();
cmd.arg(arg);
- log::debug!("created argfile at {} for {self}", tmp.path().display());
+ tracing::debug!("created argfile at {} for {self}", tmp.path().display());
let cap = self.get_args().map(|arg| arg.len() + 1).sum::<usize>();
let mut buf = Vec::with_capacity(cap);
@@ -558,7 +558,7 @@ fn piped(cmd: &mut Command, pipe_stdin: bool) -> &mut Command {
fn close_tempfile_and_log_error(file: NamedTempFile) {
file.close().unwrap_or_else(|e| {
- log::warn!("failed to close temporary file: {e}");
+ tracing::warn!("failed to close temporary file: {e}");
});
}
diff --git a/src/tools/cargo/crates/crates-io/Cargo.toml b/src/tools/cargo/crates/crates-io/Cargo.toml
index 034c2fca5..139b8aa97 100644
--- a/src/tools/cargo/crates/crates-io/Cargo.toml
+++ b/src/tools/cargo/crates/crates-io/Cargo.toml
@@ -1,8 +1,8 @@
[package]
name = "crates-io"
-version = "0.37.0"
-edition = "2021"
-license = "MIT OR Apache-2.0"
+version = "0.38.0"
+edition.workspace = true
+license.workspace = true
repository = "https://github.com/rust-lang/cargo"
description = """
Helpers for interacting with crates.io
@@ -13,9 +13,9 @@ name = "crates_io"
path = "lib.rs"
[dependencies]
-anyhow.workspace = true
curl.workspace = true
percent-encoding.workspace = true
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
+thiserror.workspace = true
url.workspace = true
diff --git a/src/tools/cargo/crates/crates-io/lib.rs b/src/tools/cargo/crates/crates-io/lib.rs
index 243808098..6ce39cefd 100644
--- a/src/tools/cargo/crates/crates-io/lib.rs
+++ b/src/tools/cargo/crates/crates-io/lib.rs
@@ -1,18 +1,18 @@
#![allow(clippy::all)]
use std::collections::BTreeMap;
-use std::fmt;
use std::fs::File;
use std::io::prelude::*;
use std::io::{Cursor, SeekFrom};
use std::time::Instant;
-use anyhow::{bail, format_err, Context, Result};
use curl::easy::{Easy, List};
use percent_encoding::{percent_encode, NON_ALPHANUMERIC};
use serde::{Deserialize, Serialize};
use url::Url;
+pub type Result<T> = std::result::Result<T, Error>;
+
pub struct Registry {
/// The base URL for issuing API requests.
host: String,
@@ -125,67 +125,62 @@ struct Crates {
meta: TotalCrates,
}
-#[derive(Debug)]
-pub enum ResponseError {
- Curl(curl::Error),
+/// Error returned when interacting with a registry.
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+ /// Error from libcurl.
+ #[error(transparent)]
+ Curl(#[from] curl::Error),
+
+ /// Error from serializing the request payload and deserializing the
+ /// response body (e.g. the response body didn't match the expected structure).
+ #[error(transparent)]
+ Json(#[from] serde_json::Error),
+
+ /// Error from IO. Mostly from reading the tarball to upload.
+ #[error("failed to seek tarball")]
+ Io(#[from] std::io::Error),
+
+ /// Response body was not valid utf8.
+ #[error("invalid response body from server")]
+ Utf8(#[from] std::string::FromUtf8Error),
+
+ /// Error from an API response containing the JSON field `errors.details`.
+ #[error(
+ "the remote server responded with an error{}: {}",
+ status(*code),
+ errors.join(", "),
+ )]
Api {
code: u32,
+ headers: Vec<String>,
errors: Vec<String>,
},
+
+ /// Error from an API response that didn't contain the expected `errors.details` field.
+ #[error(
+ "failed to get a 200 OK response, got {code}\nheaders:\n\t{}\nbody:\n{body}",
+ headers.join("\n\t"),
+ )]
Code {
code: u32,
headers: Vec<String>,
body: String,
},
- Other(anyhow::Error),
-}
-
-impl std::error::Error for ResponseError {
- fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
- match self {
- ResponseError::Curl(..) => None,
- ResponseError::Api { .. } => None,
- ResponseError::Code { .. } => None,
- ResponseError::Other(e) => Some(e.as_ref()),
- }
- }
-}
-impl fmt::Display for ResponseError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- ResponseError::Curl(e) => write!(f, "{}", e),
- ResponseError::Api { code, errors } => {
- f.write_str("the remote server responded with an error")?;
- if *code != 200 {
- write!(f, " (status {} {})", code, reason(*code))?;
- };
- write!(f, ": {}", errors.join(", "))
- }
- ResponseError::Code {
- code,
- headers,
- body,
- } => write!(
- f,
- "failed to get a 200 OK response, got {}\n\
- headers:\n\
- \t{}\n\
- body:\n\
- {}",
- code,
- headers.join("\n\t"),
- body
- ),
- ResponseError::Other(..) => write!(f, "invalid response from server"),
- }
- }
-}
-
-impl From<curl::Error> for ResponseError {
- fn from(error: curl::Error) -> Self {
- ResponseError::Curl(error)
- }
+ /// Reason why the token was invalid.
+ #[error("{0}")]
+ InvalidToken(&'static str),
+
+ /// Server was unavailable and the request timed out. This can happen when
+ /// uploading an overly large tarball to crates.io.
+ #[error(
+ "Request timed out after 30 seconds. If you're trying to \
+ upload a crate it may be too large. If the crate is under \
+ 10MB in size, you can email help@crates.io for assistance.\n\
+ Total size was {0}."
+ )]
+ Timeout(u64),
}
impl Registry {
@@ -221,10 +216,9 @@ impl Registry {
}
fn token(&self) -> Result<&str> {
- let token = match self.token.as_ref() {
- Some(s) => s,
- None => bail!("no upload token found, please run `cargo login`"),
- };
+ let token = self.token.as_ref().ok_or_else(|| {
+ Error::InvalidToken("no upload token found, please run `cargo login`")
+ })?;
check_token(token)?;
Ok(token)
}
@@ -270,12 +264,8 @@ impl Registry {
// This checks the length using seeking instead of metadata, because
// on some filesystems, getting the metadata will fail because
// the file was renamed in ops::package.
- let tarball_len = tarball
- .seek(SeekFrom::End(0))
- .with_context(|| "failed to seek tarball")?;
- tarball
- .seek(SeekFrom::Start(0))
- .with_context(|| "failed to seek tarball")?;
+ let tarball_len = tarball.seek(SeekFrom::End(0))?;
+ tarball.seek(SeekFrom::Start(0))?;
let header = {
let mut w = Vec::new();
w.extend(&(json.len() as u32).to_le_bytes());
@@ -300,18 +290,12 @@ impl Registry {
let body = self
.handle(&mut |buf| body.read(buf).unwrap_or(0))
.map_err(|e| match e {
- ResponseError::Code { code, .. }
+ Error::Code { code, .. }
if code == 503
&& started.elapsed().as_secs() >= 29
&& self.host_is_crates_io() =>
{
- format_err!(
- "Request timed out after 30 seconds. If you're trying to \
- upload a crate it may be too large. If the crate is under \
- 10MB in size, you can email help@crates.io for assistance.\n\
- Total size was {}.",
- tarball_len
- )
+ Error::Timeout(tarball_len)
}
_ => e.into(),
})?;
@@ -410,10 +394,7 @@ impl Registry {
}
}
- fn handle(
- &mut self,
- read: &mut dyn FnMut(&mut [u8]) -> usize,
- ) -> std::result::Result<String, ResponseError> {
+ fn handle(&mut self, read: &mut dyn FnMut(&mut [u8]) -> usize) -> Result<String> {
let mut headers = Vec::new();
let mut body = Vec::new();
{
@@ -427,28 +408,29 @@ impl Registry {
// Headers contain trailing \r\n, trim them to make it easier
// to work with.
let s = String::from_utf8_lossy(data).trim().to_string();
+ // Don't let the server sneak extra lines in anywhere.
+ if s.contains('\n') {
+ return true;
+ }
headers.push(s);
true
})?;
handle.perform()?;
}
- let body = match String::from_utf8(body) {
- Ok(body) => body,
- Err(..) => {
- return Err(ResponseError::Other(format_err!(
- "response body was not valid utf-8"
- )))
- }
- };
+ let body = String::from_utf8(body)?;
let errors = serde_json::from_str::<ApiErrorList>(&body)
.ok()
.map(|s| s.errors.into_iter().map(|s| s.detail).collect::<Vec<_>>());
match (self.handle.response_code()?, errors) {
(0, None) | (200, None) => Ok(body),
- (code, Some(errors)) => Err(ResponseError::Api { code, errors }),
- (code, None) => Err(ResponseError::Code {
+ (code, Some(errors)) => Err(Error::Api {
+ code,
+ headers,
+ errors,
+ }),
+ (code, None) => Err(Error::Code {
code,
headers,
body,
@@ -457,6 +439,15 @@ impl Registry {
}
}
+fn status(code: u32) -> String {
+ if code == 200 {
+ String::new()
+ } else {
+ let reason = reason(code);
+ format!(" (status {code} {reason})")
+ }
+}
+
fn reason(code: u32) -> &'static str {
// Taken from https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
match code {
@@ -520,7 +511,7 @@ pub fn is_url_crates_io(url: &str) -> bool {
/// registries only create tokens in that format so that is as less restricted as possible.
pub fn check_token(token: &str) -> Result<()> {
if token.is_empty() {
- bail!("please provide a non-empty token");
+ return Err(Error::InvalidToken("please provide a non-empty token"));
}
if token.bytes().all(|b| {
// This is essentially the US-ASCII limitation of
@@ -531,9 +522,9 @@ pub fn check_token(token: &str) -> Result<()> {
}) {
Ok(())
} else {
- Err(anyhow::anyhow!(
+ Err(Error::InvalidToken(
"token contains invalid characters.\nOnly printable ISO-8859-1 characters \
- are allowed as it is sent in a HTTPS header."
+ are allowed as it is sent in an HTTPS header.",
))
}
}
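
The `crates-io` change above drops `anyhow` in favour of a concrete `thiserror`-derived `Error` enum so callers can match on specific failure kinds. A minimal sketch of the pattern with hypothetical variant names (assumes the `thiserror` crate):

```rust
use thiserror::Error;

/// Sketch only: hypothetical error type mirroring the pattern above.
#[derive(Debug, Error)]
pub enum MyError {
    /// `#[from]` derives the `From` impl, so `?` converts automatically;
    /// `transparent` forwards `Display` and `source` to the inner error.
    #[error(transparent)]
    Io(#[from] std::io::Error),

    /// The display message can interpolate the variant's fields.
    #[error("request failed with status {code}")]
    Status { code: u32 },
}

pub type Result<T> = std::result::Result<T, MyError>;

fn read(path: &str) -> Result<String> {
    // Relies on the generated `From<std::io::Error>` conversion.
    Ok(std::fs::read_to_string(path)?)
}
```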
diff --git a/src/tools/cargo/crates/home/Cargo.toml b/src/tools/cargo/crates/home/Cargo.toml
index 6c65ecc18..03bd555a2 100644
--- a/src/tools/cargo/crates/home/Cargo.toml
+++ b/src/tools/cargo/crates/home/Cargo.toml
@@ -1,9 +1,9 @@
[package]
name = "home"
-version = "0.5.6" # also update `html_root_url` in `src/lib.rs`
+version = "0.5.7" # also update `html_root_url` in `src/lib.rs`
authors = ["Brian Anderson <andersrb@gmail.com>"]
documentation = "https://docs.rs/home"
-edition = "2018"
+edition.workspace = true
include = [
"/src",
"/Cargo.toml",
@@ -11,8 +11,7 @@ include = [
"/LICENSE-*",
"/README.md",
]
-license = "MIT OR Apache-2.0"
-readme = "README.md"
+license.workspace = true
repository = "https://github.com/rust-lang/cargo"
description = "Shared definitions of home directories."
diff --git a/src/tools/cargo/crates/mdman/Cargo.toml b/src/tools/cargo/crates/mdman/Cargo.toml
index 812f1393a..ba1d4b462 100644
--- a/src/tools/cargo/crates/mdman/Cargo.toml
+++ b/src/tools/cargo/crates/mdman/Cargo.toml
@@ -1,8 +1,8 @@
[package]
name = "mdman"
version = "0.0.0"
-edition = "2021"
-license = "MIT OR Apache-2.0"
+edition.workspace = true
+license.workspace = true
description = "Creates a man page page from markdown."
publish = false
diff --git a/src/tools/cargo/crates/resolver-tests/Cargo.toml b/src/tools/cargo/crates/resolver-tests/Cargo.toml
index e0efb9b6d..5e69d7367 100644
--- a/src/tools/cargo/crates/resolver-tests/Cargo.toml
+++ b/src/tools/cargo/crates/resolver-tests/Cargo.toml
@@ -1,7 +1,7 @@
[package]
name = "resolver-tests"
version = "0.0.0"
-edition = "2018"
+edition.workspace = true
publish = false
[dependencies]
diff --git a/src/tools/cargo/crates/semver-check/Cargo.toml b/src/tools/cargo/crates/semver-check/Cargo.toml
index f7b8c7d48..17e696566 100644
--- a/src/tools/cargo/crates/semver-check/Cargo.toml
+++ b/src/tools/cargo/crates/semver-check/Cargo.toml
@@ -2,7 +2,7 @@
name = "semver-check"
version = "0.0.0"
authors = ["Eric Huss"]
-edition = "2021"
+edition.workspace = true
publish = false
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
diff --git a/src/tools/cargo/crates/semver-check/src/main.rs b/src/tools/cargo/crates/semver-check/src/main.rs
index fa4639eb7..1ba405f57 100644
--- a/src/tools/cargo/crates/semver-check/src/main.rs
+++ b/src/tools/cargo/crates/semver-check/src/main.rs
@@ -7,6 +7,11 @@
//! An example with the word "MINOR" at the top is expected to successfully
//! build against the before and after. Otherwise it should fail. A comment of
//! "// Error:" will check that the given message appears in the error output.
+//!
+//! The code block can also include these annotations:
+//! - `run-fail`: The test should fail at runtime, not at compile time.
+//! - `dont-deny`: By default tests have a `#![deny(warnings)]`. This option
+//! omits that attribute. Note that `#![allow(unused)]` is always added.
use std::error::Error;
use std::fs;
@@ -57,7 +62,13 @@ fn doit() -> Result<(), Box<dyn Error>> {
if line.trim() == "```" {
break;
}
- block.push(line);
+ // Support rustdoc/mdbook hidden lines.
+ let line = line.strip_prefix("# ").unwrap_or(line);
+ if line == "#" {
+ block.push("");
+ } else {
+ block.push(line);
+ }
}
None => {
return Err(format!(
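
The new hidden-line handling mirrors rustdoc/mdbook conventions: a leading `# ` hides a line from rendered output, and a bare `#` stands for an empty hidden line. A small sketch of that stripping logic:

```rust
// Sketch of the hidden-line handling: a leading `# ` is stripped and a bare
// `#` becomes an empty line, matching rustdoc/mdbook conventions.
fn unhide(line: &str) -> &str {
    if line == "#" {
        ""
    } else {
        line.strip_prefix("# ").unwrap_or(line)
    }
}

fn main() {
    assert_eq!(unhide("# use std::fs;"), "use std::fs;");
    assert_eq!(unhide("#"), "");
    assert_eq!(unhide("let x = 1;"), "let x = 1;");
}
```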
diff --git a/src/tools/cargo/crates/xtask-build-man/Cargo.toml b/src/tools/cargo/crates/xtask-build-man/Cargo.toml
index 6d02aa2c3..bec10c48c 100644
--- a/src/tools/cargo/crates/xtask-build-man/Cargo.toml
+++ b/src/tools/cargo/crates/xtask-build-man/Cargo.toml
@@ -1,7 +1,7 @@
[package]
name = "xtask-build-man"
version = "0.0.0"
-edition = "2021"
+edition.workspace = true
publish = false
[dependencies]
diff --git a/src/tools/cargo/crates/xtask-bump-check/Cargo.toml b/src/tools/cargo/crates/xtask-bump-check/Cargo.toml
new file mode 100644
index 000000000..e965ad09e
--- /dev/null
+++ b/src/tools/cargo/crates/xtask-bump-check/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "xtask-bump-check"
+version = "0.0.0"
+edition.workspace = true
+publish = false
+
+[dependencies]
+anyhow.workspace = true
+cargo.workspace = true
+cargo-util.workspace = true
+clap.workspace = true
+git2.workspace = true
+tracing.workspace = true
+tracing-subscriber.workspace = true
diff --git a/src/tools/cargo/crates/xtask-unpublished/src/main.rs b/src/tools/cargo/crates/xtask-bump-check/src/main.rs
index 1942a3621..0461ab91a 100644
--- a/src/tools/cargo/crates/xtask-unpublished/src/main.rs
+++ b/src/tools/cargo/crates/xtask-bump-check/src/main.rs
@@ -1,7 +1,8 @@
mod xtask;
fn main() {
- env_logger::init_from_env("CARGO_LOG");
+ setup_logger();
+
let cli = xtask::cli();
let matches = cli.get_matches();
@@ -13,3 +14,14 @@ fn main() {
cargo::exit_with_error(e, &mut config.shell())
}
}
+
+// Keep in sync with `src/bin/cargo/main.rs@setup_logger`.
+fn setup_logger() {
+ let env = tracing_subscriber::EnvFilter::from_env("CARGO_LOG");
+
+ tracing_subscriber::fmt()
+ .with_ansi(std::io::IsTerminal::is_terminal(&std::io::stderr()))
+ .with_writer(std::io::stderr)
+ .with_env_filter(env)
+ .init();
+}
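
The logger above reads `tracing_subscriber` filter directives from the `CARGO_LOG` environment variable. A hypothetical sketch of the directive syntax (the filter string here is an example, not taken from the diff):

```rust
use tracing_subscriber::EnvFilter;

fn main() {
    // Example directives only: `warn` globally, `trace` for one target.
    let filter = EnvFilter::new("warn,xtask_bump_check=trace");
    tracing_subscriber::fmt()
        .with_writer(std::io::stderr)
        .with_env_filter(filter)
        .init();

    // Emitted only if its target matches an enabled directive.
    tracing::trace!("starting bump check");
}
```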
diff --git a/src/tools/cargo/crates/xtask-bump-check/src/xtask.rs b/src/tools/cargo/crates/xtask-bump-check/src/xtask.rs
new file mode 100644
index 000000000..f89152331
--- /dev/null
+++ b/src/tools/cargo/crates/xtask-bump-check/src/xtask.rs
@@ -0,0 +1,423 @@
+//! ```text
+//! NAME
+//! xtask-bump-check
+//!
+//! SYNOPSIS
+//! xtask-bump-check --base-rev <REV> --head-rev <REV>
+//!
+//! DESCRIPTION
+//! Checks whether any workspace member has changed since a base commit
+//! without bumping its version.
+//! ```
+
+use std::collections::HashMap;
+use std::fmt::Write;
+use std::fs;
+use std::task;
+
+use cargo::core::dependency::Dependency;
+use cargo::core::registry::PackageRegistry;
+use cargo::core::Package;
+use cargo::core::QueryKind;
+use cargo::core::Registry;
+use cargo::core::SourceId;
+use cargo::core::Workspace;
+use cargo::util::command_prelude::*;
+use cargo::util::ToSemver;
+use cargo::CargoResult;
+use cargo_util::ProcessBuilder;
+
+const UPSTREAM_BRANCH: &str = "master";
+const STATUS: &str = "BumpCheck";
+
+pub fn cli() -> clap::Command {
+ clap::Command::new("xtask-bump-check")
+ .arg(
+ opt(
+ "verbose",
+ "Use verbose output (-vv very verbose/build.rs output)",
+ )
+ .short('v')
+ .action(ArgAction::Count)
+ .global(true),
+ )
+ .arg_quiet()
+ .arg(
+ opt("color", "Coloring: auto, always, never")
+ .value_name("WHEN")
+ .global(true),
+ )
+ .arg(opt("base-rev", "Git revision to lookup for a baseline"))
+ .arg(opt("head-rev", "Git revision with changes"))
+ .arg(flag("frozen", "Require Cargo.lock and cache are up to date").global(true))
+ .arg(flag("locked", "Require Cargo.lock is up to date").global(true))
+ .arg(flag("offline", "Run without accessing the network").global(true))
+ .arg(multi_opt("config", "KEY=VALUE", "Override a configuration value").global(true))
+ .arg(
+ Arg::new("unstable-features")
+ .help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details")
+ .short('Z')
+ .value_name("FLAG")
+ .action(ArgAction::Append)
+ .global(true),
+ )
+}
+
+pub fn exec(args: &clap::ArgMatches, config: &mut cargo::util::Config) -> cargo::CliResult {
+ config_configure(config, args)?;
+
+ bump_check(args, config)?;
+
+ Ok(())
+}
+
+fn config_configure(config: &mut Config, args: &ArgMatches) -> CliResult {
+ let verbose = args.verbose();
+ // quiet is unusual because it is redefined in some subcommands in order
+ // to provide custom help text.
+ let quiet = args.flag("quiet");
+ let color = args.get_one::<String>("color").map(String::as_str);
+ let frozen = args.flag("frozen");
+ let locked = args.flag("locked");
+ let offline = args.flag("offline");
+ let mut unstable_flags = vec![];
+ if let Some(values) = args.get_many::<String>("unstable-features") {
+ unstable_flags.extend(values.cloned());
+ }
+ let mut config_args = vec![];
+ if let Some(values) = args.get_many::<String>("config") {
+ config_args.extend(values.cloned());
+ }
+ config.configure(
+ verbose,
+ quiet,
+ color,
+ frozen,
+ locked,
+ offline,
+ &None,
+ &unstable_flags,
+ &config_args,
+ )?;
+ Ok(())
+}
+
+/// Main entry of `xtask-bump-check`.
+///
+/// Assumption: version numbers are incremental. We never publish point releases for old versions.
+fn bump_check(args: &clap::ArgMatches, config: &mut cargo::util::Config) -> CargoResult<()> {
+ let ws = args.workspace(config)?;
+ let repo = git2::Repository::open(ws.root())?;
+ let base_commit = get_base_commit(config, args, &repo)?;
+ let head_commit = get_head_commit(args, &repo)?;
+ let referenced_commit = get_referenced_commit(&repo, &base_commit)?;
+ let changed_members = changed(&ws, &repo, &base_commit, &head_commit)?;
+ let status = |msg: &str| config.shell().status(STATUS, msg);
+
+ status(&format!("base commit `{}`", base_commit.id()))?;
+ status(&format!("head commit `{}`", head_commit.id()))?;
+
+ let mut needs_bump = Vec::new();
+
+ check_crates_io(config, &changed_members, &mut needs_bump)?;
+
+ if let Some(referenced_commit) = referenced_commit.as_ref() {
+ status(&format!("compare against `{}`", referenced_commit.id()))?;
+ for referenced_member in checkout_ws(&ws, &repo, referenced_commit)?.members() {
+ let pkg_name = referenced_member.name().as_str();
+ let Some(changed_member) = changed_members.get(pkg_name) else {
+ tracing::trace!("skipping {pkg_name}, may be removed or not published");
+ continue;
+ };
+
+ if changed_member.version() <= referenced_member.version() {
+ needs_bump.push(*changed_member);
+ }
+ }
+ }
+
+ if !needs_bump.is_empty() {
+ needs_bump.sort();
+ needs_bump.dedup();
+ let mut msg = String::new();
+ msg.push_str("Detected changes in these crates but no version bump found:\n");
+ for pkg in needs_bump {
+ writeln!(&mut msg, " {}@{}", pkg.name(), pkg.version())?;
+ }
+ msg.push_str("\nPlease bump at least one patch version in each corresponding Cargo.toml.");
+ anyhow::bail!(msg)
+ }
+
+ // Tracked by https://github.com/obi1kenobi/cargo-semver-checks/issues/511
+ let exclude_args = [
+ "--exclude",
+ "cargo-credential-1password",
+ "--exclude",
+ "cargo-credential-libsecret",
+ "--exclude",
+ "cargo-credential-macos-keychain",
+ "--exclude",
+ "cargo-credential-wincred",
+ ];
+
+ // Even when we test against baseline-rev, we still need to make sure a
+ // change doesn't violate SemVer rules against crates.io releases. The
+ // chance of this happening is nearly zero, but there is no harm in checking twice.
+ let mut cmd = ProcessBuilder::new("cargo");
+ cmd.arg("semver-checks")
+ .arg("check-release")
+ .arg("--workspace")
+ .args(&exclude_args);
+ config.shell().status("Running", &cmd)?;
+ cmd.exec()?;
+
+ if let Some(referenced_commit) = referenced_commit.as_ref() {
+ let mut cmd = ProcessBuilder::new("cargo");
+ cmd.arg("semver-checks")
+ .arg("--workspace")
+ .arg("--baseline-rev")
+ .arg(referenced_commit.id().to_string())
+ .args(&exclude_args);
+ config.shell().status("Running", &cmd)?;
+ cmd.exec()?;
+ }
+
+ status("no version bump needed for member crates.")?;
+
+ return Ok(());
+}
+
+/// Returns the commit of upstream `master` branch if `base-rev` is missing.
+fn get_base_commit<'a>(
+ config: &Config,
+ args: &clap::ArgMatches,
+ repo: &'a git2::Repository,
+) -> CargoResult<git2::Commit<'a>> {
+ let base_commit = match args.get_one::<String>("base-rev") {
+ Some(sha) => {
+ let obj = repo.revparse_single(sha)?;
+ obj.peel_to_commit()?
+ }
+ None => {
+ let upstream_branches = repo
+ .branches(Some(git2::BranchType::Remote))?
+ .filter_map(|r| r.ok())
+ .filter(|(b, _)| {
+ b.name()
+ .ok()
+ .flatten()
+ .unwrap_or_default()
+ .ends_with(&format!("/{UPSTREAM_BRANCH}"))
+ })
+ .map(|(b, _)| b)
+ .collect::<Vec<_>>();
+ if upstream_branches.is_empty() {
+ anyhow::bail!(
+ "could not find `base-sha` for `{UPSTREAM_BRANCH}`, pass it in directly"
+ );
+ }
+ let upstream_ref = upstream_branches[0].get();
+ if upstream_branches.len() > 1 {
+ let name = upstream_ref.name().expect("name is valid UTF-8");
+ let _ = config.shell().warn(format!(
+ "multiple `{UPSTREAM_BRANCH}` found, picking {name}"
+ ));
+ }
+ upstream_ref.peel_to_commit()?
+ }
+ };
+ Ok(base_commit)
+}
+
+/// Returns `HEAD` of the Git repository if `head-rev` is missing.
+fn get_head_commit<'a>(
+ args: &clap::ArgMatches,
+ repo: &'a git2::Repository,
+) -> CargoResult<git2::Commit<'a>> {
+ let head_commit = match args.get_one::<String>("head-rev") {
+ Some(sha) => {
+ let head_obj = repo.revparse_single(sha)?;
+ head_obj.peel_to_commit()?
+ }
+ None => {
+ let head_ref = repo.head()?;
+ head_ref.peel_to_commit()?
+ }
+ };
+ Ok(head_commit)
+}
+
+/// Gets the referenced commit to compare against when checking whether a version bump is needed.
+///
+/// * When merging into nightly, check the version with beta branch
+/// * When merging into beta, check the version with stable branch
+/// * When merging into stable, check against crates.io registry directly
+fn get_referenced_commit<'a>(
+ repo: &'a git2::Repository,
+ base: &git2::Commit<'a>,
+) -> CargoResult<Option<git2::Commit<'a>>> {
+ let [beta, stable] = beta_and_stable_branch(&repo)?;
+ let rev_id = base.id();
+ let stable_commit = stable.get().peel_to_commit()?;
+ let beta_commit = beta.get().peel_to_commit()?;
+
+ let referenced_commit = if rev_id == stable_commit.id() {
+ None
+ } else if rev_id == beta_commit.id() {
+ tracing::trace!("stable branch from `{}`", stable.name().unwrap().unwrap());
+ Some(stable_commit)
+ } else {
+ tracing::trace!("beta branch from `{}`", beta.name().unwrap().unwrap());
+ Some(beta_commit)
+ };
+
+ Ok(referenced_commit)
+}
+
+/// Gets the current beta and stable branches in the cargo repository.
+///
+/// Assumptions:
+///
+/// * The repository contains the full history of `<remote>/rust-1.*.0` branches.
+/// * The version part of `<remote>/rust-1.*.0` always ends with a zero.
+/// * The maximum version is for beta channel, and the second one is for stable.
+fn beta_and_stable_branch(repo: &git2::Repository) -> CargoResult<[git2::Branch<'_>; 2]> {
+ let mut release_branches = Vec::new();
+ for branch in repo.branches(Some(git2::BranchType::Remote))? {
+ let (branch, _) = branch?;
+ let name = branch.name()?.unwrap();
+ let Some((_, version)) = name.split_once("/rust-") else {
+ tracing::trace!("branch `{name}` is not in the format of `<remote>/rust-<semver>`");
+ continue;
+ };
+ let Ok(version) = version.to_semver() else {
+ tracing::trace!("branch `{name}` is not a valid semver: `{version}`");
+ continue;
+ };
+ release_branches.push((version, branch));
+ }
+ release_branches.sort_unstable_by(|a, b| a.0.cmp(&b.0));
+ release_branches.dedup_by(|a, b| a.0 == b.0);
+
+ let beta = release_branches.pop().unwrap();
+ let stable = release_branches.pop().unwrap();
+
+ assert_eq!(beta.0.major, 1);
+ assert_eq!(beta.0.patch, 0);
+ assert_eq!(stable.0.major, 1);
+ assert_eq!(stable.0.patch, 0);
+ assert_ne!(beta.0.minor, stable.0.minor);
+
+ Ok([beta.1, stable.1])
+}
+
+/// Lists all changed workspace members between two commits.
+fn changed<'r, 'ws>(
+ ws: &'ws Workspace<'_>,
+ repo: &'r git2::Repository,
+ base_commit: &git2::Commit<'r>,
+ head: &git2::Commit<'r>,
+) -> CargoResult<HashMap<&'ws str, &'ws Package>> {
+ let root_pkg_name = ws.current()?.name(); // `cargo` crate.
+ let ws_members = ws
+ .members()
+ .filter(|pkg| pkg.name() != root_pkg_name) // Only take care of sub crates here.
+ .filter(|pkg| pkg.publish() != &Some(vec![])) // filter out `publish = false`
+ .map(|pkg| {
+ // Use the package root path relative to the workspace root so that we can
+ // compare it with the paths of changed files to determine which package has changed.
+ let relative_pkg_root = pkg.root().strip_prefix(ws.root()).unwrap();
+ (relative_pkg_root, pkg)
+ })
+ .collect::<Vec<_>>();
+ let base_tree = base_commit.as_object().peel_to_tree()?;
+ let head_tree = head.as_object().peel_to_tree()?;
+ let diff = repo.diff_tree_to_tree(Some(&base_tree), Some(&head_tree), Default::default())?;
+
+ let mut changed_members = HashMap::new();
+
+ for delta in diff.deltas() {
+ let old = delta.old_file().path().unwrap();
+ let new = delta.new_file().path().unwrap();
+ for (ref pkg_root, pkg) in ws_members.iter() {
+ if old.starts_with(pkg_root) || new.starts_with(pkg_root) {
+ changed_members.insert(pkg.name().as_str(), *pkg);
+ break;
+ }
+ }
+ }
+
+ tracing::trace!("changed_members: {:?}", changed_members.keys());
+ Ok(changed_members)
+}
+
+/// Compares version against published crates on crates.io.
+///
+/// Assumption: We always release a version larger than all existing versions.
+fn check_crates_io<'a>(
+ config: &Config,
+ changed_members: &HashMap<&'a str, &'a Package>,
+ needs_bump: &mut Vec<&'a Package>,
+) -> CargoResult<()> {
+ let source_id = SourceId::crates_io(config)?;
+ let mut registry = PackageRegistry::new(config)?;
+ let _lock = config.acquire_package_cache_lock()?;
+ registry.lock_patches();
+ config.shell().status(
+ STATUS,
+ format_args!("compare against `{}`", source_id.display_registry_name()),
+ )?;
+ for (name, member) in changed_members {
+ let current = member.version();
+ let version_req = format!(">={current}");
+ let query = Dependency::parse(*name, Some(&version_req), source_id)?;
+ let possibilities = loop {
+ // Exact to avoid returning all for path/git
+ match registry.query_vec(&query, QueryKind::Exact) {
+ task::Poll::Ready(res) => {
+ break res?;
+ }
+ task::Poll::Pending => registry.block_until_ready()?,
+ }
+ };
+ if possibilities.is_empty() {
+ tracing::trace!("dep `{name}` has no version greater than or equal to `{current}`");
+ } else {
+ tracing::trace!(
+ "`{name}@{current}` needs a bump because its should have a version newer than crates.io: {:?}`",
+ possibilities
+ .iter()
+ .map(|s| format!("{}@{}", s.name(), s.version()))
+ .collect::<Vec<_>>(),
+ );
+ needs_bump.push(member);
+ }
+ }
+
+ Ok(())
+}
+
+/// Checks out a temporary workspace to do further version comparisons.
+fn checkout_ws<'cfg, 'a>(
+ ws: &Workspace<'cfg>,
+ repo: &'a git2::Repository,
+ referenced_commit: &git2::Commit<'a>,
+) -> CargoResult<Workspace<'cfg>> {
+ let repo_path = repo.path().as_os_str().to_str().unwrap();
+ // Put it under `target/cargo-<short-id>`
+ let short_id = &referenced_commit.id().to_string()[..7];
+ let checkout_path = ws.target_dir().join(format!("cargo-{short_id}"));
+ let checkout_path = checkout_path.as_path_unlocked();
+ let _ = fs::remove_dir_all(checkout_path);
+ let new_repo = git2::build::RepoBuilder::new()
+ .clone_local(git2::build::CloneLocal::Local)
+ .clone(repo_path, checkout_path)?;
+ let obj = new_repo.find_object(referenced_commit.id(), None)?;
+ new_repo.reset(&obj, git2::ResetType::Hard, None)?;
+ Workspace::new(&checkout_path.join("Cargo.toml"), ws.config())
+}
+
+#[test]
+fn verify_cli() {
+ cli().debug_assert();
+}
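
The bump rule in `bump_check` flags a changed member whose version is not strictly greater than the one on the reference branch. A small sketch of that comparison (versions here are illustrative; assumes the `semver` crate):

```rust
use semver::Version;

// Sketch of the bump rule: a changed crate needs a version bump when its
// version is not strictly greater than the version on the reference branch.
fn needs_bump(changed: &Version, referenced: &Version) -> bool {
    changed <= referenced
}

fn main() {
    let referenced = Version::parse("0.2.0").unwrap();
    assert!(needs_bump(&Version::parse("0.2.0").unwrap(), &referenced));
    assert!(!needs_bump(&Version::parse("0.2.1").unwrap(), &referenced));
}
```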
diff --git a/src/tools/cargo/crates/xtask-stale-label/Cargo.toml b/src/tools/cargo/crates/xtask-stale-label/Cargo.toml
index af3218e96..b1f54a2f1 100644
--- a/src/tools/cargo/crates/xtask-stale-label/Cargo.toml
+++ b/src/tools/cargo/crates/xtask-stale-label/Cargo.toml
@@ -1,7 +1,7 @@
[package]
name = "xtask-stale-label"
version = "0.0.0"
-edition = "2021"
+edition.workspace = true
publish = false
[dependencies]
diff --git a/src/tools/cargo/crates/xtask-stale-label/src/main.rs b/src/tools/cargo/crates/xtask-stale-label/src/main.rs
index 37675979c..88c044b5b 100644
--- a/src/tools/cargo/crates/xtask-stale-label/src/main.rs
+++ b/src/tools/cargo/crates/xtask-stale-label/src/main.rs
@@ -34,7 +34,7 @@ fn main() {
for (label, value) in autolabel.iter() {
let Some(trigger_files) = value.get("trigger_files") else {
- continue
+ continue;
};
let trigger_files = trigger_files.as_array().expect("an array");
let missing_files: Vec<_> = trigger_files
diff --git a/src/tools/cargo/crates/xtask-unpublished/Cargo.toml b/src/tools/cargo/crates/xtask-unpublished/Cargo.toml
deleted file mode 100644
index 541a34dea..000000000
--- a/src/tools/cargo/crates/xtask-unpublished/Cargo.toml
+++ /dev/null
@@ -1,12 +0,0 @@
-[package]
-name = "xtask-unpublished"
-version = "0.0.0"
-edition = "2021"
-publish = false
-
-[dependencies]
-anyhow.workspace = true
-cargo.workspace = true
-clap.workspace = true
-env_logger.workspace = true
-log.workspace = true
diff --git a/src/tools/cargo/crates/xtask-unpublished/src/xtask.rs b/src/tools/cargo/crates/xtask-unpublished/src/xtask.rs
deleted file mode 100644
index f1086951f..000000000
--- a/src/tools/cargo/crates/xtask-unpublished/src/xtask.rs
+++ /dev/null
@@ -1,200 +0,0 @@
-//! `xtask-unpublished` outputs a table with publish status --- a local version
-//! and a version on crates.io for comparisons.
-//!
-//! This aims to help developers check if there is any crate required a new
-//! publish, as well as detect if a version bump is needed in CI pipeline.
-
-use std::collections::HashSet;
-
-use cargo::core::registry::PackageRegistry;
-use cargo::core::QueryKind;
-use cargo::core::Registry;
-use cargo::core::SourceId;
-use cargo::ops::Packages;
-use cargo::util::command_prelude::*;
-
-pub fn cli() -> clap::Command {
- clap::Command::new("xtask-unpublished")
- .arg_package_spec_simple("Package to inspect the published status")
- .arg(
- opt(
- "verbose",
- "Use verbose output (-vv very verbose/build.rs output)",
- )
- .short('v')
- .action(ArgAction::Count)
- .global(true),
- )
- .arg_quiet()
- .arg(
- opt("color", "Coloring: auto, always, never")
- .value_name("WHEN")
- .global(true),
- )
- .arg(flag("frozen", "Require Cargo.lock and cache are up to date").global(true))
- .arg(flag("locked", "Require Cargo.lock is up to date").global(true))
- .arg(flag("offline", "Run without accessing the network").global(true))
- .arg(multi_opt("config", "KEY=VALUE", "Override a configuration value").global(true))
- .arg(
- Arg::new("unstable-features")
- .help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details")
- .short('Z')
- .value_name("FLAG")
- .action(ArgAction::Append)
- .global(true),
- )
-}
-
-pub fn exec(args: &clap::ArgMatches, config: &mut cargo::util::Config) -> cargo::CliResult {
- config_configure(config, args)?;
-
- unpublished(args, config)?;
-
- Ok(())
-}
-
-fn config_configure(config: &mut Config, args: &ArgMatches) -> CliResult {
- let verbose = args.verbose();
- // quiet is unusual because it is redefined in some subcommands in order
- // to provide custom help text.
- let quiet = args.flag("quiet");
- let color = args.get_one::<String>("color").map(String::as_str);
- let frozen = args.flag("frozen");
- let locked = args.flag("locked");
- let offline = args.flag("offline");
- let mut unstable_flags = vec![];
- if let Some(values) = args.get_many::<String>("unstable-features") {
- unstable_flags.extend(values.cloned());
- }
- let mut config_args = vec![];
- if let Some(values) = args.get_many::<String>("config") {
- config_args.extend(values.cloned());
- }
- config.configure(
- verbose,
- quiet,
- color,
- frozen,
- locked,
- offline,
- &None,
- &unstable_flags,
- &config_args,
- )?;
- Ok(())
-}
-
-fn unpublished(args: &clap::ArgMatches, config: &mut cargo::util::Config) -> cargo::CliResult {
- let ws = args.workspace(config)?;
-
- let members_to_inspect: HashSet<_> = {
- let pkgs = args.packages_from_flags()?;
- if let Packages::Packages(_) = pkgs {
- HashSet::from_iter(pkgs.get_packages(&ws)?)
- } else {
- HashSet::from_iter(ws.members())
- }
- };
-
- let mut results = Vec::new();
- {
- let mut registry = PackageRegistry::new(config)?;
- let _lock = config.acquire_package_cache_lock()?;
- registry.lock_patches();
- let source_id = SourceId::crates_io(config)?;
-
- for member in members_to_inspect {
- let name = member.name();
- let current = member.version();
- if member.publish() == &Some(vec![]) {
- log::trace!("skipping {name}, `publish = false`");
- continue;
- }
-
- let version_req = format!("<={current}");
- let query =
- cargo::core::dependency::Dependency::parse(name, Some(&version_req), source_id)?;
- let possibilities = loop {
- // Exact to avoid returning all for path/git
- match registry.query_vec(&query, QueryKind::Exact) {
- std::task::Poll::Ready(res) => {
- break res?;
- }
- std::task::Poll::Pending => registry.block_until_ready()?,
- }
- };
- let (last, published) = possibilities
- .iter()
- .map(|s| s.version())
- .max()
- .map(|last| (last.to_string(), last == current))
- .unwrap_or(("-".to_string(), false));
-
- results.push(vec![
- name.to_string(),
- last,
- current.to_string(),
- if published { "yes" } else { "no" }.to_string(),
- ]);
- }
- }
- results.sort();
-
- if results.is_empty() {
- return Ok(());
- }
-
- results.insert(
- 0,
- vec![
- "name".to_owned(),
- "crates.io".to_owned(),
- "local".to_owned(),
- "published?".to_owned(),
- ],
- );
-
- output_table(results);
-
- Ok(())
-}
-
-/// Outputs a markdown table like this.
-///
-/// ```text
-/// | name | crates.io | local | published? |
-/// |------------------|-----------|--------|------------|
-/// | cargo | 0.70.1 | 0.72.0 | no |
-/// | cargo-platform | 0.1.2 | 0.1.2 | yes |
-/// | cargo-util | - | 0.2.4 | no |
-/// | crates-io | 0.36.0 | 0.36.0 | yes |
-/// | home | - | 0.5.6 | no |
-/// ```
-fn output_table(table: Vec<Vec<String>>) {
- let header = table.first().unwrap();
- let paddings = table.iter().fold(vec![0; header.len()], |mut widths, row| {
- for (width, field) in widths.iter_mut().zip(row) {
- *width = usize::max(*width, field.len());
- }
- widths
- });
-
- let print = |row: &[_]| {
- for (field, pad) in row.iter().zip(&paddings) {
- print!("| {field:pad$} ");
- }
- println!("|");
- };
-
- print(header);
-
- paddings.iter().for_each(|fill| print!("|-{:-<fill$}-", ""));
- println!("|");
-
- table.iter().skip(1).for_each(|r| print(r));
-}
-
-#[test]
-fn verify_cli() {
- cli().debug_assert();
-}
diff --git a/src/tools/cargo/credential/cargo-credential-1password/Cargo.toml b/src/tools/cargo/credential/cargo-credential-1password/Cargo.toml
index 8db40e577..a607e6da1 100644
--- a/src/tools/cargo/credential/cargo-credential-1password/Cargo.toml
+++ b/src/tools/cargo/credential/cargo-credential-1password/Cargo.toml
@@ -1,8 +1,8 @@
[package]
name = "cargo-credential-1password"
-version = "0.2.0"
-edition = "2021"
-license = "MIT OR Apache-2.0"
+version = "0.3.0"
+edition.workspace = true
+license.workspace = true
repository = "https://github.com/rust-lang/cargo"
description = "A Cargo credential process that stores tokens in a 1password vault."
diff --git a/src/tools/cargo/credential/cargo-credential-1password/src/main.rs b/src/tools/cargo/credential/cargo-credential-1password/src/main.rs
index 4f512b717..a2607fd2f 100644
--- a/src/tools/cargo/credential/cargo-credential-1password/src/main.rs
+++ b/src/tools/cargo/credential/cargo-credential-1password/src/main.rs
@@ -1,6 +1,8 @@
//! Cargo registry 1password credential process.
-use cargo_credential::{Credential, Error};
+use cargo_credential::{
+ Action, CacheControl, Credential, CredentialResponse, Error, RegistryInfo, Secret,
+};
use serde::Deserialize;
use std::io::Read;
use std::process::{Command, Stdio};
@@ -38,13 +40,13 @@ struct Url {
}
impl OnePasswordKeychain {
- fn new() -> Result<OnePasswordKeychain, Error> {
- let mut args = std::env::args().skip(1);
+ fn new(args: &[&str]) -> Result<OnePasswordKeychain, Error> {
+ let mut args = args.iter();
let mut action = false;
let mut account = None;
let mut vault = None;
while let Some(arg) = args.next() {
- match arg.as_str() {
+ match *arg {
"--account" => {
account = Some(args.next().ok_or("--account needs an arg")?);
}
@@ -63,7 +65,10 @@ impl OnePasswordKeychain {
}
}
}
- Ok(OnePasswordKeychain { account, vault })
+ Ok(OnePasswordKeychain {
+ account: account.map(|s| s.to_string()),
+ vault: vault.map(|s| s.to_string()),
+ })
}
fn signin(&self) -> Result<Option<String>, Error> {
@@ -73,9 +78,8 @@ impl OnePasswordKeychain {
return Ok(None);
}
let mut cmd = Command::new("op");
- cmd.args(&["signin", "--raw"]);
+ cmd.args(["signin", "--raw"]);
cmd.stdout(Stdio::piped());
- self.with_tty(&mut cmd)?;
let mut child = cmd
.spawn()
.map_err(|e| format!("failed to spawn `op`: {}", e))?;
@@ -121,19 +125,6 @@ impl OnePasswordKeychain {
cmd
}
- fn with_tty(&self, cmd: &mut Command) -> Result<(), Error> {
- #[cfg(unix)]
- const IN_DEVICE: &str = "/dev/tty";
- #[cfg(windows)]
- const IN_DEVICE: &str = "CONIN$";
- let stdin = std::fs::OpenOptions::new()
- .read(true)
- .write(true)
- .open(IN_DEVICE)?;
- cmd.stdin(stdin);
- Ok(())
- }
-
fn run_cmd(&self, mut cmd: Command) -> Result<String, Error> {
cmd.stdout(Stdio::piped());
let mut child = cmd
@@ -196,12 +187,12 @@ impl OnePasswordKeychain {
&self,
session: &Option<String>,
id: &str,
- token: &str,
+ token: Secret<&str>,
_name: Option<&str>,
) -> Result<(), Error> {
let cmd = self.make_cmd(
session,
- &["item", "edit", id, &format!("password={}", token)],
+ &["item", "edit", id, &format!("password={}", token.expose())],
);
self.run_cmd(cmd)?;
Ok(())
@@ -211,21 +202,21 @@ impl OnePasswordKeychain {
&self,
session: &Option<String>,
index_url: &str,
- token: &str,
+ token: Secret<&str>,
name: Option<&str>,
) -> Result<(), Error> {
let title = match name {
Some(name) => format!("Cargo registry token for {}", name),
None => "Cargo registry token".to_string(),
};
- let mut cmd = self.make_cmd(
+ let cmd = self.make_cmd(
session,
&[
"item",
"create",
"--category",
"Login",
- &format!("password={}", token),
+ &format!("password={}", token.expose()),
&format!("url={}", index_url),
"--title",
&title,
@@ -233,15 +224,11 @@ impl OnePasswordKeychain {
CARGO_TAG,
],
);
- // For unknown reasons, `op item create` seems to not be happy if
- // stdin is not a tty. Otherwise it returns with a 0 exit code without
- // doing anything.
- self.with_tty(&mut cmd)?;
self.run_cmd(cmd)?;
Ok(())
}
- fn get_token(&self, session: &Option<String>, id: &str) -> Result<String, Error> {
+ fn get_token(&self, session: &Option<String>, id: &str) -> Result<Secret<String>, Error> {
let cmd = self.make_cmd(session, &["item", "get", "--format=json", id]);
let buffer = self.run_cmd(cmd)?;
let item: Login = serde_json::from_str(&buffer)
@@ -250,7 +237,8 @@ impl OnePasswordKeychain {
match password {
Some(password) => password
.value
- .ok_or_else(|| format!("missing password value for entry").into()),
+ .map(Secret::from)
+ .ok_or("missing password value for entry".into()),
None => Err("could not find password field".into()),
}
}
@@ -262,53 +250,58 @@ impl OnePasswordKeychain {
}
}
-impl Credential for OnePasswordKeychain {
- fn name(&self) -> &'static str {
- env!("CARGO_PKG_NAME")
- }
-
- fn get(&self, index_url: &str) -> Result<String, Error> {
- let session = self.signin()?;
- if let Some(id) = self.search(&session, index_url)? {
- self.get_token(&session, &id)
- } else {
- return Err(format!(
- "no 1password entry found for registry `{}`, try `cargo login` to add a token",
- index_url
- )
- .into());
- }
- }
-
- fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error> {
- let session = self.signin()?;
- // Check if an item already exists.
- if let Some(id) = self.search(&session, index_url)? {
- self.modify(&session, &id, token, name)
- } else {
- self.create(&session, index_url, token, name)
- }
- }
+pub struct OnePasswordCredential {}
- fn erase(&self, index_url: &str) -> Result<(), Error> {
- let session = self.signin()?;
- // Check if an item already exists.
- if let Some(id) = self.search(&session, index_url)? {
- self.delete(&session, &id)?;
- } else {
- eprintln!("not currently logged in to `{}`", index_url);
+impl Credential for OnePasswordCredential {
+ fn perform(
+ &self,
+ registry: &RegistryInfo,
+ action: &Action,
+ args: &[&str],
+ ) -> Result<CredentialResponse, Error> {
+ let op = OnePasswordKeychain::new(args)?;
+ match action {
+ Action::Get(_) => {
+ let session = op.signin()?;
+ if let Some(id) = op.search(&session, registry.index_url)? {
+ op.get_token(&session, &id)
+ .map(|token| CredentialResponse::Get {
+ token,
+ cache: CacheControl::Session,
+ operation_independent: true,
+ })
+ } else {
+ Err(Error::NotFound)
+ }
+ }
+ Action::Login(options) => {
+ let session = op.signin()?;
+ // Check if an item already exists.
+ if let Some(id) = op.search(&session, registry.index_url)? {
+ eprintln!("note: token already exists for `{}`", registry.index_url);
+ let token = cargo_credential::read_token(options, registry)?;
+ op.modify(&session, &id, token.as_deref(), None)?;
+ } else {
+ let token = cargo_credential::read_token(options, registry)?;
+ op.create(&session, registry.index_url, token.as_deref(), None)?;
+ }
+ Ok(CredentialResponse::Login)
+ }
+ Action::Logout => {
+ let session = op.signin()?;
+ // Check if an item already exists.
+ if let Some(id) = op.search(&session, registry.index_url)? {
+ op.delete(&session, &id)?;
+ Ok(CredentialResponse::Logout)
+ } else {
+ Err(Error::NotFound)
+ }
+ }
+ _ => Err(Error::OperationNotSupported),
}
- Ok(())
}
}
fn main() {
- let op = match OnePasswordKeychain::new() {
- Ok(op) => op,
- Err(e) => {
- eprintln!("error: {}", e);
- std::process::exit(1);
- }
- };
- cargo_credential::main(op);
+ cargo_credential::main(OnePasswordCredential {});
}
diff --git a/src/tools/cargo/credential/cargo-credential-gnome-secret/build.rs b/src/tools/cargo/credential/cargo-credential-gnome-secret/build.rs
deleted file mode 100644
index 8bb86ee43..000000000
--- a/src/tools/cargo/credential/cargo-credential-gnome-secret/build.rs
+++ /dev/null
@@ -1,8 +0,0 @@
-fn main() {
- if cfg!(target_os = "linux") {
- // TODO: Consider ignoring errors when libsecret is not installed and
- // switching the impl to UnsupportedCredential (possibly along with a
- // warning?).
- pkg_config::probe_library("libsecret-1").unwrap();
- }
-}
diff --git a/src/tools/cargo/credential/cargo-credential-gnome-secret/src/libsecret.rs b/src/tools/cargo/credential/cargo-credential-gnome-secret/src/libsecret.rs
deleted file mode 100644
index c584eeecf..000000000
--- a/src/tools/cargo/credential/cargo-credential-gnome-secret/src/libsecret.rs
+++ /dev/null
@@ -1,190 +0,0 @@
-//! Implementation of the libsecret credential helper.
-
-use cargo_credential::{Credential, Error};
-use std::ffi::{CStr, CString};
-use std::os::raw::{c_char, c_int};
-use std::ptr::{null, null_mut};
-
-#[allow(non_camel_case_types)]
-type gchar = c_char;
-
-#[allow(non_camel_case_types)]
-type gboolean = c_int;
-
-type GQuark = u32;
-
-#[repr(C)]
-struct GError {
- domain: GQuark,
- code: c_int,
- message: *mut gchar,
-}
-
-#[repr(C)]
-struct GCancellable {
- _private: [u8; 0],
-}
-
-#[repr(C)]
-struct SecretSchema {
- name: *const gchar,
- flags: SecretSchemaFlags,
- attributes: [SecretSchemaAttribute; 32],
-}
-
-#[repr(C)]
-#[derive(Copy, Clone)]
-struct SecretSchemaAttribute {
- name: *const gchar,
- attr_type: SecretSchemaAttributeType,
-}
-
-#[repr(C)]
-enum SecretSchemaFlags {
- None = 0,
-}
-
-#[repr(C)]
-#[derive(Copy, Clone)]
-enum SecretSchemaAttributeType {
- String = 0,
-}
-
-extern "C" {
- fn secret_password_store_sync(
- schema: *const SecretSchema,
- collection: *const gchar,
- label: *const gchar,
- password: *const gchar,
- cancellable: *mut GCancellable,
- error: *mut *mut GError,
- ...
- ) -> gboolean;
- fn secret_password_clear_sync(
- schema: *const SecretSchema,
- cancellable: *mut GCancellable,
- error: *mut *mut GError,
- ...
- ) -> gboolean;
- fn secret_password_lookup_sync(
- schema: *const SecretSchema,
- cancellable: *mut GCancellable,
- error: *mut *mut GError,
- ...
- ) -> *mut gchar;
-}
-
-pub struct GnomeSecret;
-
-fn label(index_url: &str) -> CString {
- CString::new(format!("cargo-registry:{}", index_url)).unwrap()
-}
-
-fn schema() -> SecretSchema {
- let mut attributes = [SecretSchemaAttribute {
- name: null(),
- attr_type: SecretSchemaAttributeType::String,
- }; 32];
- attributes[0] = SecretSchemaAttribute {
- name: b"url\0".as_ptr() as *const gchar,
- attr_type: SecretSchemaAttributeType::String,
- };
- SecretSchema {
- name: b"org.rust-lang.cargo.registry\0".as_ptr() as *const gchar,
- flags: SecretSchemaFlags::None,
- attributes,
- }
-}
-
-impl Credential for GnomeSecret {
- fn name(&self) -> &'static str {
- env!("CARGO_PKG_NAME")
- }
-
- fn get(&self, index_url: &str) -> Result<String, Error> {
- let mut error: *mut GError = null_mut();
- let attr_url = CString::new("url").unwrap();
- let index_url_c = CString::new(index_url).unwrap();
- let schema = schema();
- unsafe {
- let token_c = secret_password_lookup_sync(
- &schema,
- null_mut(),
- &mut error,
- attr_url.as_ptr(),
- index_url_c.as_ptr(),
- null() as *const gchar,
- );
- if !error.is_null() {
- return Err(format!(
- "failed to get token: {}",
- CStr::from_ptr((*error).message).to_str()?
- )
- .into());
- }
- if token_c.is_null() {
- return Err(format!("cannot find token for {}", index_url).into());
- }
- let token = CStr::from_ptr(token_c)
- .to_str()
- .map_err(|e| format!("expected utf8 token: {}", e))?
- .to_string();
- Ok(token)
- }
- }
-
- fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error> {
- let label = label(name.unwrap_or(index_url));
- let token = CString::new(token).unwrap();
- let mut error: *mut GError = null_mut();
- let attr_url = CString::new("url").unwrap();
- let index_url_c = CString::new(index_url).unwrap();
- let schema = schema();
- unsafe {
- secret_password_store_sync(
- &schema,
- b"default\0".as_ptr() as *const gchar,
- label.as_ptr(),
- token.as_ptr(),
- null_mut(),
- &mut error,
- attr_url.as_ptr(),
- index_url_c.as_ptr(),
- null() as *const gchar,
- );
- if !error.is_null() {
- return Err(format!(
- "failed to store token: {}",
- CStr::from_ptr((*error).message).to_str()?
- )
- .into());
- }
- }
- Ok(())
- }
-
- fn erase(&self, index_url: &str) -> Result<(), Error> {
- let schema = schema();
- let mut error: *mut GError = null_mut();
- let attr_url = CString::new("url").unwrap();
- let index_url_c = CString::new(index_url).unwrap();
- unsafe {
- secret_password_clear_sync(
- &schema,
- null_mut(),
- &mut error,
- attr_url.as_ptr(),
- index_url_c.as_ptr(),
- null() as *const gchar,
- );
- if !error.is_null() {
- return Err(format!(
- "failed to erase token: {}",
- CStr::from_ptr((*error).message).to_str()?
- )
- .into());
- }
- }
- Ok(())
- }
-}
diff --git a/src/tools/cargo/credential/cargo-credential-gnome-secret/src/main.rs b/src/tools/cargo/credential/cargo-credential-gnome-secret/src/main.rs
deleted file mode 100644
index 1d2ecc61f..000000000
--- a/src/tools/cargo/credential/cargo-credential-gnome-secret/src/main.rs
+++ /dev/null
@@ -1,12 +0,0 @@
-//! Cargo registry gnome libsecret credential process.
-
-#[cfg(target_os = "linux")]
-mod libsecret;
-#[cfg(not(target_os = "linux"))]
-use cargo_credential::UnsupportedCredential as GnomeSecret;
-#[cfg(target_os = "linux")]
-use libsecret::GnomeSecret;
-
-fn main() {
- cargo_credential::main(GnomeSecret);
-}
diff --git a/src/tools/cargo/credential/cargo-credential-gnome-secret/Cargo.toml b/src/tools/cargo/credential/cargo-credential-libsecret/Cargo.toml
index 63b3e95cc..1bd4bb7d0 100644
--- a/src/tools/cargo/credential/cargo-credential-gnome-secret/Cargo.toml
+++ b/src/tools/cargo/credential/cargo-credential-libsecret/Cargo.toml
@@ -1,13 +1,12 @@
[package]
-name = "cargo-credential-gnome-secret"
-version = "0.2.0"
-edition = "2021"
-license = "MIT OR Apache-2.0"
+name = "cargo-credential-libsecret"
+version = "0.3.1"
+edition.workspace = true
+license.workspace = true
repository = "https://github.com/rust-lang/cargo"
description = "A Cargo credential process that stores tokens with GNOME libsecret."
[dependencies]
+anyhow.workspace = true
cargo-credential.workspace = true
-
-[build-dependencies]
-pkg-config.workspace = true
+libloading.workspace = true
diff --git a/src/tools/cargo/credential/cargo-credential-gnome-secret/README.md b/src/tools/cargo/credential/cargo-credential-libsecret/README.md
index 7a4b02838..f169323e0 100644
--- a/src/tools/cargo/credential/cargo-credential-gnome-secret/README.md
+++ b/src/tools/cargo/credential/cargo-credential-libsecret/README.md
@@ -1,4 +1,4 @@
-# cargo-credential-gnome-secret
+# cargo-credential-libsecret
This is the implementation for the Cargo credential helper for [GNOME libsecret].
See the [credential-process] documentation for how to use this.
diff --git a/src/tools/cargo/credential/cargo-credential-libsecret/src/lib.rs b/src/tools/cargo/credential/cargo-credential-libsecret/src/lib.rs
new file mode 100644
index 000000000..f83b424ee
--- /dev/null
+++ b/src/tools/cargo/credential/cargo-credential-libsecret/src/lib.rs
@@ -0,0 +1,235 @@
+#[cfg(target_os = "linux")]
+mod linux {
+ //! Implementation of the libsecret credential helper.
+
+ use anyhow::Context;
+ use cargo_credential::{
+ read_token, Action, CacheControl, Credential, CredentialResponse, Error, RegistryInfo,
+ Secret,
+ };
+ use libloading::{Library, Symbol};
+ use std::ffi::{CStr, CString};
+ use std::os::raw::{c_char, c_int};
+ use std::ptr::{null, null_mut};
+
+ #[allow(non_camel_case_types)]
+ type gchar = c_char;
+
+ #[allow(non_camel_case_types)]
+ type gboolean = c_int;
+
+ type GQuark = u32;
+
+ #[repr(C)]
+ struct GError {
+ domain: GQuark,
+ code: c_int,
+ message: *mut gchar,
+ }
+
+ #[repr(C)]
+ struct GCancellable {
+ _private: [u8; 0],
+ }
+
+ #[repr(C)]
+ struct SecretSchema {
+ name: *const gchar,
+ flags: SecretSchemaFlags,
+ attributes: [SecretSchemaAttribute; 32],
+ }
+
+ #[repr(C)]
+ #[derive(Copy, Clone)]
+ struct SecretSchemaAttribute {
+ name: *const gchar,
+ attr_type: SecretSchemaAttributeType,
+ }
+
+ #[repr(C)]
+ enum SecretSchemaFlags {
+ None = 0,
+ }
+
+ #[repr(C)]
+ #[derive(Copy, Clone)]
+ enum SecretSchemaAttributeType {
+ String = 0,
+ }
+
+ type SecretPasswordStoreSync = extern "C" fn(
+ schema: *const SecretSchema,
+ collection: *const gchar,
+ label: *const gchar,
+ password: *const gchar,
+ cancellable: *mut GCancellable,
+ error: *mut *mut GError,
+ ...
+ ) -> gboolean;
+ type SecretPasswordClearSync = extern "C" fn(
+ schema: *const SecretSchema,
+ cancellable: *mut GCancellable,
+ error: *mut *mut GError,
+ ...
+ ) -> gboolean;
+ type SecretPasswordLookupSync = extern "C" fn(
+ schema: *const SecretSchema,
+ cancellable: *mut GCancellable,
+ error: *mut *mut GError,
+ ...
+ ) -> *mut gchar;
+
+ pub struct LibSecretCredential;
+
+ fn label(index_url: &str) -> CString {
+ CString::new(format!("cargo-registry:{}", index_url)).unwrap()
+ }
+
+ fn schema() -> SecretSchema {
+ let mut attributes = [SecretSchemaAttribute {
+ name: null(),
+ attr_type: SecretSchemaAttributeType::String,
+ }; 32];
+ attributes[0] = SecretSchemaAttribute {
+ name: b"url\0".as_ptr() as *const gchar,
+ attr_type: SecretSchemaAttributeType::String,
+ };
+ SecretSchema {
+ name: b"org.rust-lang.cargo.registry\0".as_ptr() as *const gchar,
+ flags: SecretSchemaFlags::None,
+ attributes,
+ }
+ }
+
+ impl Credential for LibSecretCredential {
+ fn perform(
+ &self,
+ registry: &RegistryInfo,
+ action: &Action,
+ _args: &[&str],
+ ) -> Result<CredentialResponse, Error> {
+ // Dynamically load libsecret to avoid users needing to install
+ // additional -dev packages when building this provider.
+ let lib;
+ let secret_password_lookup_sync: Symbol<SecretPasswordLookupSync>;
+ let secret_password_store_sync: Symbol<SecretPasswordStoreSync>;
+ let secret_password_clear_sync: Symbol<SecretPasswordClearSync>;
+ unsafe {
+ lib = Library::new("libsecret-1.so").context(
+ "failed to load libsecret: try installing the `libsecret` \
+ or `libsecret-1-0` package with the system package manager",
+ )?;
+ secret_password_lookup_sync = lib
+ .get(b"secret_password_lookup_sync\0")
+ .map_err(Box::new)?;
+ secret_password_store_sync =
+ lib.get(b"secret_password_store_sync\0").map_err(Box::new)?;
+ secret_password_clear_sync =
+ lib.get(b"secret_password_clear_sync\0").map_err(Box::new)?;
+ }
+
+ let index_url_c = CString::new(registry.index_url).unwrap();
+ match action {
+ cargo_credential::Action::Get(_) => {
+ let mut error: *mut GError = null_mut();
+ let attr_url = CString::new("url").unwrap();
+ let schema = schema();
+ unsafe {
+ let token_c = secret_password_lookup_sync(
+ &schema,
+ null_mut(),
+ &mut error,
+ attr_url.as_ptr(),
+ index_url_c.as_ptr(),
+ null() as *const gchar,
+ );
+ if !error.is_null() {
+ return Err(format!(
+ "failed to get token: {}",
+ CStr::from_ptr((*error).message)
+ .to_str()
+ .unwrap_or_default()
+ )
+ .into());
+ }
+ if token_c.is_null() {
+ return Err(Error::NotFound);
+ }
+ let token = Secret::from(
+ CStr::from_ptr(token_c)
+ .to_str()
+ .map_err(|e| format!("expected utf8 token: {}", e))?
+ .to_string(),
+ );
+ Ok(CredentialResponse::Get {
+ token,
+ cache: CacheControl::Session,
+ operation_independent: true,
+ })
+ }
+ }
+ cargo_credential::Action::Login(options) => {
+ let label = label(registry.name.unwrap_or(registry.index_url));
+ let token = CString::new(read_token(options, registry)?.expose()).unwrap();
+ let mut error: *mut GError = null_mut();
+ let attr_url = CString::new("url").unwrap();
+ let schema = schema();
+ unsafe {
+ secret_password_store_sync(
+ &schema,
+ b"default\0".as_ptr() as *const gchar,
+ label.as_ptr(),
+ token.as_ptr(),
+ null_mut(),
+ &mut error,
+ attr_url.as_ptr(),
+ index_url_c.as_ptr(),
+ null() as *const gchar,
+ );
+ if !error.is_null() {
+ return Err(format!(
+ "failed to store token: {}",
+ CStr::from_ptr((*error).message)
+ .to_str()
+ .unwrap_or_default()
+ )
+ .into());
+ }
+ }
+ Ok(CredentialResponse::Login)
+ }
+ cargo_credential::Action::Logout => {
+ let schema = schema();
+ let mut error: *mut GError = null_mut();
+ let attr_url = CString::new("url").unwrap();
+ unsafe {
+ secret_password_clear_sync(
+ &schema,
+ null_mut(),
+ &mut error,
+ attr_url.as_ptr(),
+ index_url_c.as_ptr(),
+ null() as *const gchar,
+ );
+ if !error.is_null() {
+ return Err(format!(
+ "failed to erase token: {}",
+ CStr::from_ptr((*error).message)
+ .to_str()
+ .unwrap_or_default()
+ )
+ .into());
+ }
+ }
+ Ok(CredentialResponse::Logout)
+ }
+ _ => Err(Error::OperationNotSupported),
+ }
+ }
+ }
+}
+
+#[cfg(not(target_os = "linux"))]
+pub use cargo_credential::UnsupportedCredential as LibSecretCredential;
+#[cfg(target_os = "linux")]
+pub use linux::LibSecretCredential;
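
Unlike the removed `pkg-config` build script, the provider above resolves the `secret_password_*` symbols from `libsecret-1.so` at runtime via `libloading`. A minimal, hypothetical sketch of that pattern, loading `libm` and `cos` as stand-ins (Linux-only; assumes the `libloading` crate):

```rust
use libloading::{Library, Symbol};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    unsafe {
        // Loading a library and resolving symbols is unsafe: the caller
        // vouches that the resolved symbol has the declared signature.
        let lib = Library::new("libm.so.6")?;
        let cos: Symbol<unsafe extern "C" fn(f64) -> f64> = lib.get(b"cos\0")?;
        println!("cos(0.0) = {}", cos(0.0));
    }
    Ok(())
}
```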
diff --git a/src/tools/cargo/credential/cargo-credential-macos-keychain/Cargo.toml b/src/tools/cargo/credential/cargo-credential-macos-keychain/Cargo.toml
index 6311b71de..342c771b5 100644
--- a/src/tools/cargo/credential/cargo-credential-macos-keychain/Cargo.toml
+++ b/src/tools/cargo/credential/cargo-credential-macos-keychain/Cargo.toml
@@ -1,8 +1,8 @@
[package]
name = "cargo-credential-macos-keychain"
-version = "0.2.0"
-edition = "2021"
-license = "MIT OR Apache-2.0"
+version = "0.3.0"
+edition.workspace = true
+license.workspace = true
repository = "https://github.com/rust-lang/cargo"
description = "A Cargo credential process that stores tokens in a macOS keychain."
diff --git a/src/tools/cargo/credential/cargo-credential-macos-keychain/src/lib.rs b/src/tools/cargo/credential/cargo-credential-macos-keychain/src/lib.rs
new file mode 100644
index 000000000..9e6d55472
--- /dev/null
+++ b/src/tools/cargo/credential/cargo-credential-macos-keychain/src/lib.rs
@@ -0,0 +1,81 @@
+//! Cargo registry macos keychain credential process.
+
+#[cfg(target_os = "macos")]
+mod macos {
+ use cargo_credential::{
+ read_token, Action, CacheControl, Credential, CredentialResponse, Error, RegistryInfo,
+ };
+ use security_framework::os::macos::keychain::SecKeychain;
+
+ pub struct MacKeychain;
+
+ /// The account name is not used.
+ const ACCOUNT: &'static str = "";
+ const NOT_FOUND: i32 = -25300; // errSecItemNotFound
+
+ fn registry(index_url: &str) -> String {
+ format!("cargo-registry:{}", index_url)
+ }
+
+ impl Credential for MacKeychain {
+ fn perform(
+ &self,
+ reg: &RegistryInfo<'_>,
+ action: &Action<'_>,
+ _args: &[&str],
+ ) -> Result<CredentialResponse, Error> {
+ let keychain = SecKeychain::default().unwrap();
+ let service_name = registry(reg.index_url);
+ let not_found = security_framework::base::Error::from(NOT_FOUND).code();
+ match action {
+ Action::Get(_) => match keychain.find_generic_password(&service_name, ACCOUNT) {
+ Err(e) if e.code() == not_found => Err(Error::NotFound),
+ Err(e) => Err(Box::new(e).into()),
+ Ok((pass, _)) => {
+ let token = String::from_utf8(pass.as_ref().to_vec()).map_err(Box::new)?;
+ Ok(CredentialResponse::Get {
+ token: token.into(),
+ cache: CacheControl::Session,
+ operation_independent: true,
+ })
+ }
+ },
+ Action::Login(options) => {
+ let token = read_token(options, reg)?;
+ match keychain.find_generic_password(&service_name, ACCOUNT) {
+ Err(e) => {
+ if e.code() == not_found {
+ keychain
+ .add_generic_password(
+ &service_name,
+ ACCOUNT,
+ token.expose().as_bytes(),
+ )
+ .map_err(Box::new)?;
+ }
+ }
+ Ok((_, mut item)) => {
+ item.set_password(token.expose().as_bytes())
+ .map_err(Box::new)?;
+ }
+ }
+ Ok(CredentialResponse::Login)
+ }
+ Action::Logout => match keychain.find_generic_password(&service_name, ACCOUNT) {
+ Err(e) if e.code() == not_found => Err(Error::NotFound),
+ Err(e) => Err(Box::new(e).into()),
+ Ok((_, item)) => {
+ item.delete();
+ Ok(CredentialResponse::Logout)
+ }
+ },
+ _ => Err(Error::OperationNotSupported),
+ }
+ }
+ }
+}
+
+#[cfg(not(target_os = "macos"))]
+pub use cargo_credential::UnsupportedCredential as MacKeychain;
+#[cfg(target_os = "macos")]
+pub use macos::MacKeychain;
diff --git a/src/tools/cargo/credential/cargo-credential-macos-keychain/src/main.rs b/src/tools/cargo/credential/cargo-credential-macos-keychain/src/main.rs
deleted file mode 100644
index 4d6ea96d0..000000000
--- a/src/tools/cargo/credential/cargo-credential-macos-keychain/src/main.rs
+++ /dev/null
@@ -1,58 +0,0 @@
-//! Cargo registry macos keychain credential process.
-
-#[cfg(target_os = "macos")]
-mod macos {
- use cargo_credential::{Credential, Error};
- use security_framework::os::macos::keychain::SecKeychain;
-
- pub(crate) struct MacKeychain;
-
- /// The account name is not used.
- const ACCOUNT: &'static str = "";
-
- fn registry(registry_name: &str) -> String {
- format!("cargo-registry:{}", registry_name)
- }
-
- impl Credential for MacKeychain {
- fn name(&self) -> &'static str {
- env!("CARGO_PKG_NAME")
- }
-
- fn get(&self, index_url: &str) -> Result<String, Error> {
- let keychain = SecKeychain::default().unwrap();
- let service_name = registry(index_url);
- let (pass, _item) = keychain.find_generic_password(&service_name, ACCOUNT)?;
- String::from_utf8(pass.as_ref().to_vec())
- .map_err(|_| "failed to convert token to UTF8".into())
- }
-
- fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error> {
- let keychain = SecKeychain::default().unwrap();
- let service_name = registry(name.unwrap_or(index_url));
- if let Ok((_pass, mut item)) = keychain.find_generic_password(&service_name, ACCOUNT) {
- item.set_password(token.as_bytes())?;
- } else {
- keychain.add_generic_password(&service_name, ACCOUNT, token.as_bytes())?;
- }
- Ok(())
- }
-
- fn erase(&self, index_url: &str) -> Result<(), Error> {
- let keychain = SecKeychain::default().unwrap();
- let service_name = registry(index_url);
- let (_pass, item) = keychain.find_generic_password(&service_name, ACCOUNT)?;
- item.delete();
- Ok(())
- }
- }
-}
-
-#[cfg(not(target_os = "macos"))]
-use cargo_credential::UnsupportedCredential as MacKeychain;
-#[cfg(target_os = "macos")]
-use macos::MacKeychain;
-
-fn main() {
- cargo_credential::main(MacKeychain);
-}
diff --git a/src/tools/cargo/credential/cargo-credential-wincred/Cargo.toml b/src/tools/cargo/credential/cargo-credential-wincred/Cargo.toml
index cd168a8a3..8c609dc4e 100644
--- a/src/tools/cargo/credential/cargo-credential-wincred/Cargo.toml
+++ b/src/tools/cargo/credential/cargo-credential-wincred/Cargo.toml
@@ -1,8 +1,8 @@
[package]
name = "cargo-credential-wincred"
-version = "0.2.0"
-edition = "2021"
-license = "MIT OR Apache-2.0"
+version = "0.3.0"
+edition.workspace = true
+license.workspace = true
repository = "https://github.com/rust-lang/cargo"
description = "A Cargo credential process that stores tokens with Windows Credential Manager."
diff --git a/src/tools/cargo/credential/cargo-credential-wincred/src/lib.rs b/src/tools/cargo/credential/cargo-credential-wincred/src/lib.rs
new file mode 100644
index 000000000..9200ca58f
--- /dev/null
+++ b/src/tools/cargo/credential/cargo-credential-wincred/src/lib.rs
@@ -0,0 +1,125 @@
+//! Cargo registry windows credential process.
+
+#[cfg(windows)]
+mod win {
+ use cargo_credential::{read_token, Action, CacheControl, CredentialResponse, RegistryInfo};
+ use cargo_credential::{Credential, Error};
+ use std::ffi::OsStr;
+
+ use std::os::windows::ffi::OsStrExt;
+
+ use windows_sys::core::PWSTR;
+ use windows_sys::Win32::Foundation::ERROR_NOT_FOUND;
+ use windows_sys::Win32::Foundation::FILETIME;
+ use windows_sys::Win32::Foundation::TRUE;
+ use windows_sys::Win32::Security::Credentials::CredReadW;
+ use windows_sys::Win32::Security::Credentials::CredWriteW;
+ use windows_sys::Win32::Security::Credentials::CREDENTIALW;
+ use windows_sys::Win32::Security::Credentials::CRED_PERSIST_LOCAL_MACHINE;
+ use windows_sys::Win32::Security::Credentials::CRED_TYPE_GENERIC;
+ use windows_sys::Win32::Security::Credentials::{CredDeleteW, CredFree};
+
+ pub struct WindowsCredential;
+
+ /// Converts a string to a nul-terminated wide UTF-16 byte sequence.
+ fn wstr(s: &str) -> Vec<u16> {
+ let mut wide: Vec<u16> = OsStr::new(s).encode_wide().collect();
+ if wide.iter().any(|b| *b == 0) {
+ panic!("nul byte in wide string");
+ }
+ wide.push(0);
+ wide
+ }
+
+ fn target_name(index_url: &str) -> Vec<u16> {
+ wstr(&format!("cargo-registry:{}", index_url))
+ }
+
+ impl Credential for WindowsCredential {
+ fn perform(
+ &self,
+ registry: &RegistryInfo,
+ action: &Action,
+ _args: &[&str],
+ ) -> Result<CredentialResponse, Error> {
+ match action {
+ Action::Get(_) => {
+ let target_name = target_name(registry.index_url);
+ let mut p_credential: *mut CREDENTIALW = std::ptr::null_mut() as *mut _;
+ let bytes = unsafe {
+ if CredReadW(
+ target_name.as_ptr(),
+ CRED_TYPE_GENERIC,
+ 0,
+ &mut p_credential as *mut _,
+ ) != TRUE
+ {
+ let err = std::io::Error::last_os_error();
+ if err.raw_os_error() == Some(ERROR_NOT_FOUND as i32) {
+ return Err(Error::NotFound);
+ }
+ return Err(Box::new(err).into());
+ }
+ std::slice::from_raw_parts(
+ (*p_credential).CredentialBlob,
+ (*p_credential).CredentialBlobSize as usize,
+ )
+ };
+ let token = String::from_utf8(bytes.to_vec()).map_err(Box::new);
+ unsafe { CredFree(p_credential as *mut _) };
+ Ok(CredentialResponse::Get {
+ token: token?.into(),
+ cache: CacheControl::Session,
+ operation_independent: true,
+ })
+ }
+ Action::Login(options) => {
+ let token = read_token(options, registry)?.expose();
+ let target_name = target_name(registry.index_url);
+ let comment = wstr("Cargo registry token");
+ let credential = CREDENTIALW {
+ Flags: 0,
+ Type: CRED_TYPE_GENERIC,
+ TargetName: target_name.as_ptr() as PWSTR,
+ Comment: comment.as_ptr() as PWSTR,
+ LastWritten: FILETIME {
+ dwLowDateTime: 0,
+ dwHighDateTime: 0,
+ },
+ CredentialBlobSize: token.len() as u32,
+ CredentialBlob: token.as_bytes().as_ptr() as *mut u8,
+ Persist: CRED_PERSIST_LOCAL_MACHINE,
+ AttributeCount: 0,
+ Attributes: std::ptr::null_mut(),
+ TargetAlias: std::ptr::null_mut(),
+ UserName: std::ptr::null_mut(),
+ };
+ let result = unsafe { CredWriteW(&credential, 0) };
+ if result != TRUE {
+ let err = std::io::Error::last_os_error();
+ return Err(Box::new(err).into());
+ }
+ Ok(CredentialResponse::Login)
+ }
+ Action::Logout => {
+ let target_name = target_name(registry.index_url);
+ let result = unsafe { CredDeleteW(target_name.as_ptr(), CRED_TYPE_GENERIC, 0) };
+ if result != TRUE {
+ let err = std::io::Error::last_os_error();
+ if err.raw_os_error() == Some(ERROR_NOT_FOUND as i32) {
+ return Err(Error::NotFound);
+ }
+ return Err(Box::new(err).into());
+ }
+ Ok(CredentialResponse::Logout)
+ }
+ _ => Err(Error::OperationNotSupported),
+ }
+ }
+ }
+}
+
+#[cfg(not(windows))]
+pub use cargo_credential::UnsupportedCredential as WindowsCredential;
+#[cfg(windows)]
+pub use win::WindowsCredential;
diff --git a/src/tools/cargo/credential/cargo-credential-wincred/src/main.rs b/src/tools/cargo/credential/cargo-credential-wincred/src/main.rs
deleted file mode 100644
index 4377172e8..000000000
--- a/src/tools/cargo/credential/cargo-credential-wincred/src/main.rs
+++ /dev/null
@@ -1,122 +0,0 @@
-//! Cargo registry windows credential process.
-
-#[cfg(windows)]
-mod win {
- use cargo_credential::{Credential, Error};
- use std::ffi::OsStr;
- use std::os::windows::ffi::OsStrExt;
-
- use windows_sys::core::PWSTR;
- use windows_sys::Win32::Foundation::ERROR_NOT_FOUND;
- use windows_sys::Win32::Foundation::FILETIME;
- use windows_sys::Win32::Foundation::TRUE;
- use windows_sys::Win32::Security::Credentials::CredDeleteW;
- use windows_sys::Win32::Security::Credentials::CredReadW;
- use windows_sys::Win32::Security::Credentials::CredWriteW;
- use windows_sys::Win32::Security::Credentials::CREDENTIALW;
- use windows_sys::Win32::Security::Credentials::CRED_PERSIST_LOCAL_MACHINE;
- use windows_sys::Win32::Security::Credentials::CRED_TYPE_GENERIC;
-
- pub(crate) struct WindowsCredential;
-
- /// Converts a string to a nul-terminated wide UTF-16 byte sequence.
- fn wstr(s: &str) -> Vec<u16> {
- let mut wide: Vec<u16> = OsStr::new(s).encode_wide().collect();
- if wide.iter().any(|b| *b == 0) {
- panic!("nul byte in wide string");
- }
- wide.push(0);
- wide
- }
-
- fn target_name(registry_name: &str) -> Vec<u16> {
- wstr(&format!("cargo-registry:{}", registry_name))
- }
-
- impl Credential for WindowsCredential {
- fn name(&self) -> &'static str {
- env!("CARGO_PKG_NAME")
- }
-
- fn get(&self, index_url: &str) -> Result<String, Error> {
- let target_name = target_name(index_url);
- let p_credential: *mut CREDENTIALW = std::ptr::null_mut() as *mut _;
- unsafe {
- if CredReadW(
- target_name.as_ptr(),
- CRED_TYPE_GENERIC,
- 0,
- p_credential as *mut _ as *mut _,
- ) != TRUE
- {
- return Err(format!(
- "failed to fetch token: {}",
- std::io::Error::last_os_error()
- )
- .into());
- }
- let bytes = std::slice::from_raw_parts(
- (*p_credential).CredentialBlob,
- (*p_credential).CredentialBlobSize as usize,
- );
- String::from_utf8(bytes.to_vec())
- .map_err(|_| "failed to convert token to UTF8".into())
- }
- }
-
- fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error> {
- let token = token.as_bytes();
- let target_name = target_name(index_url);
- let comment = match name {
- Some(name) => wstr(&format!("Cargo registry token for {}", name)),
- None => wstr("Cargo registry token"),
- };
- let mut credential = CREDENTIALW {
- Flags: 0,
- Type: CRED_TYPE_GENERIC,
- TargetName: target_name.as_ptr() as PWSTR,
- Comment: comment.as_ptr() as PWSTR,
- LastWritten: FILETIME {
- dwLowDateTime: 0,
- dwHighDateTime: 0,
- },
- CredentialBlobSize: token.len() as u32,
- CredentialBlob: token.as_ptr() as *mut u8,
- Persist: CRED_PERSIST_LOCAL_MACHINE,
- AttributeCount: 0,
- Attributes: std::ptr::null_mut(),
- TargetAlias: std::ptr::null_mut(),
- UserName: std::ptr::null_mut(),
- };
- let result = unsafe { CredWriteW(&mut credential, 0) };
- if result != TRUE {
- let err = std::io::Error::last_os_error();
- return Err(format!("failed to store token: {}", err).into());
- }
- Ok(())
- }
-
- fn erase(&self, index_url: &str) -> Result<(), Error> {
- let target_name = target_name(index_url);
- let result = unsafe { CredDeleteW(target_name.as_ptr(), CRED_TYPE_GENERIC, 0) };
- if result != TRUE {
- let err = std::io::Error::last_os_error();
- if err.raw_os_error() == Some(ERROR_NOT_FOUND as i32) {
- eprintln!("not currently logged in to `{}`", index_url);
- return Ok(());
- }
- return Err(format!("failed to remove token: {}", err).into());
- }
- Ok(())
- }
- }
-}
-
-#[cfg(not(windows))]
-use cargo_credential::UnsupportedCredential as WindowsCredential;
-#[cfg(windows)]
-use win::WindowsCredential;
-
-fn main() {
- cargo_credential::main(WindowsCredential);
-}
diff --git a/src/tools/cargo/credential/cargo-credential/Cargo.toml b/src/tools/cargo/credential/cargo-credential/Cargo.toml
index 2addaf5af..8cd1348be 100644
--- a/src/tools/cargo/credential/cargo-credential/Cargo.toml
+++ b/src/tools/cargo/credential/cargo-credential/Cargo.toml
@@ -1,9 +1,21 @@
[package]
name = "cargo-credential"
-version = "0.2.0"
-edition = "2021"
-license = "MIT OR Apache-2.0"
+version = "0.3.0"
+edition.workspace = true
+license.workspace = true
repository = "https://github.com/rust-lang/cargo"
description = "A library to assist writing Cargo credential helpers."
[dependencies]
+anyhow.workspace = true
+libc.workspace = true
+serde = { workspace = true, features = ["derive"] }
+serde_json.workspace = true
+thiserror.workspace = true
+time.workspace = true
+
+[target.'cfg(windows)'.dependencies]
+windows-sys = { workspace = true, features = ["Win32_System_Console", "Win32_Foundation"] }
+
+[dev-dependencies]
+snapbox = { workspace = true, features = ["examples"] }
diff --git a/src/tools/cargo/credential/cargo-credential/README.md b/src/tools/cargo/credential/cargo-credential/README.md
index 53dc8e6b7..049b3ba55 100644
--- a/src/tools/cargo/credential/cargo-credential/README.md
+++ b/src/tools/cargo/credential/cargo-credential/README.md
@@ -18,7 +18,7 @@ Create a Cargo project with this as a dependency:
# Add this to your Cargo.toml:
[dependencies]
-cargo-credential = "0.1"
+cargo-credential = "0.3"
```
And then include a `main.rs` binary which implements the `Credential` trait, and calls
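A minimal sketch of such a `main.rs`, assuming only the 0.3 API introduced in this diff (the provider name `MyCredential` is illustrative, not something defined by the crate):

```rust
use cargo_credential::{Action, Credential, CredentialResponse, Error, RegistryInfo};

struct MyCredential;

impl Credential for MyCredential {
    fn perform(
        &self,
        _registry: &RegistryInfo<'_>,
        _action: &Action<'_>,
        _args: &[&str],
    ) -> Result<CredentialResponse, Error> {
        // A real provider would handle Get/Login/Logout here;
        // this sketch declines every request.
        Err(Error::OperationNotSupported)
    }
}

fn main() {
    cargo_credential::main(MyCredential);
}
```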
diff --git a/src/tools/cargo/credential/cargo-credential/examples/file-provider.rs b/src/tools/cargo/credential/cargo-credential/examples/file-provider.rs
new file mode 100644
index 000000000..d11958536
--- /dev/null
+++ b/src/tools/cargo/credential/cargo-credential/examples/file-provider.rs
@@ -0,0 +1,90 @@
+//! Example credential provider that stores credentials in a JSON file.
+//! This is not secure.
+
+use cargo_credential::{
+ Action, CacheControl, Credential, CredentialResponse, RegistryInfo, Secret,
+};
+use std::{collections::HashMap, fs::File, io::ErrorKind};
+type Error = Box<dyn std::error::Error + Send + Sync + 'static>;
+
+struct FileCredential;
+
+impl Credential for FileCredential {
+ fn perform(
+ &self,
+ registry: &RegistryInfo,
+ action: &Action,
+ _args: &[&str],
+ ) -> Result<CredentialResponse, cargo_credential::Error> {
+ if registry.index_url != "https://github.com/rust-lang/crates.io-index" {
+ // Restrict this provider to only work for crates.io. Cargo will skip it and attempt
+ // another provider for any other registry.
+ //
+ // If a provider supports any registry, then this check should be omitted.
+ return Err(cargo_credential::Error::UrlNotSupported);
+ }
+
+ // `Error::Other` takes a boxed `std::error::Error` type that causes Cargo to show the error.
+ let mut creds = FileCredential::read().map_err(cargo_credential::Error::Other)?;
+
+ match action {
+ Action::Get(_) => {
+ // Cargo requested a token, look it up.
+ if let Some(token) = creds.get(registry.index_url) {
+ Ok(CredentialResponse::Get {
+ token: token.clone(),
+ cache: CacheControl::Session,
+ operation_independent: true,
+ })
+ } else {
+ // Credential providers should respond with `NotFound` when a credential can not be
+ // found, allowing Cargo to attempt another provider.
+ Err(cargo_credential::Error::NotFound)
+ }
+ }
+ Action::Login(login_options) => {
+ // The token for `cargo login` can come from the `login_options` parameter or
+ // interactively reading from stdin.
+ //
+ // `cargo_credential::read_token` automatically handles this.
+ let token = cargo_credential::read_token(login_options, registry)?;
+ creds.insert(registry.index_url.to_string(), token);
+
+ FileCredential::write(&creds).map_err(cargo_credential::Error::Other)?;
+
+ // Credentials were successfully stored.
+ Ok(CredentialResponse::Login)
+ }
+ Action::Logout => {
+ if creds.remove(registry.index_url).is_none() {
+ // If the user attempts to log out from a registry that has no credentials
+ // stored, then NotFound is the appropriate error.
+ Err(cargo_credential::Error::NotFound)
+ } else {
+ // Credentials were successfully erased.
+ Ok(CredentialResponse::Logout)
+ }
+ }
+ // If a credential provider doesn't support a given operation, it should respond with `OperationNotSupported`.
+ _ => Err(cargo_credential::Error::OperationNotSupported),
+ }
+ }
+}
+
+impl FileCredential {
+ fn read() -> Result<HashMap<String, Secret<String>>, Error> {
+ match File::open("cargo-credentials.json") {
+ Ok(f) => Ok(serde_json::from_reader(f)?),
+ Err(e) if e.kind() == ErrorKind::NotFound => Ok(HashMap::new()),
+ Err(e) => Err(e)?,
+ }
+ }
+ fn write(value: &HashMap<String, Secret<String>>) -> Result<(), Error> {
+ let file = File::create("cargo-credentials.json")?;
+ Ok(serde_json::to_writer_pretty(file, value)?)
+ }
+}
+
+fn main() {
+ cargo_credential::main(FileCredential);
+}
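Since `Secret<String>` is `#[serde(transparent)]` (see `secret.rs` later in this diff), the `cargo-credentials.json` written by this example is a plain map from index URL to token, e.g. `{"https://github.com/rust-lang/crates.io-index": "s3krit"}` after the login exercised in `tests/examples.rs` below.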
diff --git a/src/tools/cargo/credential/cargo-credential/examples/stdout-redirected.rs b/src/tools/cargo/credential/cargo-credential/examples/stdout-redirected.rs
new file mode 100644
index 000000000..0b9bcc2f7
--- /dev/null
+++ b/src/tools/cargo/credential/cargo-credential/examples/stdout-redirected.rs
@@ -0,0 +1,25 @@
+//! Provider used for testing redirection of stdout.
+
+use cargo_credential::{Action, Credential, CredentialResponse, Error, RegistryInfo};
+
+struct MyCredential;
+
+impl Credential for MyCredential {
+ fn perform(
+ &self,
+ _registry: &RegistryInfo,
+ _action: &Action,
+ _args: &[&str],
+ ) -> Result<CredentialResponse, Error> {
+ // Informational messages should be sent on stderr.
+ eprintln!("message on stderr should be sent the the parent process");
+
+ // Reading from stdin and writing to stdout will go to the attached console (tty).
+ println!("message from test credential provider");
+ Err(Error::OperationNotSupported)
+ }
+}
+
+fn main() {
+ cargo_credential::main(MyCredential);
+}
diff --git a/src/tools/cargo/credential/cargo-credential/src/error.rs b/src/tools/cargo/credential/cargo-credential/src/error.rs
new file mode 100644
index 000000000..2ebaf9977
--- /dev/null
+++ b/src/tools/cargo/credential/cargo-credential/src/error.rs
@@ -0,0 +1,206 @@
+use serde::{Deserialize, Serialize};
+use std::error::Error as StdError;
+use thiserror::Error as ThisError;
+
+/// Credential provider error type.
+///
+/// `UrlNotSupported` and `NotFound` errors both cause Cargo
+/// to attempt another provider, if one is available. The other
+/// variants are fatal.
+///
+/// Note: Do not add a tuple variant, as it cannot be serialized.
+#[derive(Serialize, Deserialize, ThisError, Debug)]
+#[serde(rename_all = "kebab-case", tag = "kind")]
+#[non_exhaustive]
+pub enum Error {
+ /// Registry URL is not supported. This should be used if
+ /// the provider only works for some registries. Cargo will
+ /// try another provider, if available
+ #[error("registry not supported")]
+ UrlNotSupported,
+
+ /// Credentials could not be found. Cargo will try another
+ /// provider, if available
+ #[error("credential not found")]
+ NotFound,
+
+ /// The provider doesn't support this operation, such as
+ /// a provider that can't support 'login' / 'logout'
+ #[error("requested operation not supported")]
+ OperationNotSupported,
+
+ /// The provider failed to perform the operation. Other
+ /// providers will not be attempted
+ #[error(transparent)]
+ #[serde(with = "error_serialize")]
+ Other(Box<dyn StdError + Sync + Send>),
+
+ /// A new variant was added to this enum since Cargo was built
+ #[error("unknown error kind; try updating Cargo?")]
+ #[serde(other)]
+ Unknown,
+}
+
+impl From<String> for Error {
+ fn from(err: String) -> Self {
+ Box::new(StringTypedError {
+ message: err.to_string(),
+ source: None,
+ })
+ .into()
+ }
+}
+
+impl From<&str> for Error {
+ fn from(err: &str) -> Self {
+ err.to_string().into()
+ }
+}
+
+impl From<anyhow::Error> for Error {
+ fn from(value: anyhow::Error) -> Self {
+ let mut prev = None;
+ for e in value.chain().rev() {
+ prev = Some(Box::new(StringTypedError {
+ message: e.to_string(),
+ source: prev,
+ }));
+ }
+ Error::Other(prev.unwrap())
+ }
+}
+
+impl<T: StdError + Send + Sync + 'static> From<Box<T>> for Error {
+ fn from(value: Box<T>) -> Self {
+ Error::Other(value)
+ }
+}
+
+/// String-based error type with an optional source
+#[derive(Debug)]
+struct StringTypedError {
+ message: String,
+ source: Option<Box<StringTypedError>>,
+}
+
+impl StdError for StringTypedError {
+ fn source(&self) -> Option<&(dyn StdError + 'static)> {
+ self.source.as_ref().map(|err| err as &dyn StdError)
+ }
+}
+
+impl std::fmt::Display for StringTypedError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.message.fmt(f)
+ }
+}
+
+/// Serializer / deserializer for any boxed error.
+/// The string representation of the error, and its `source` chain can roundtrip across
+/// the serialization. The actual types are lost (downcast will not work).
+mod error_serialize {
+ use std::error::Error as StdError;
+ use std::ops::Deref;
+
+ use serde::{ser::SerializeStruct, Deserialize, Deserializer, Serializer};
+
+ use crate::error::StringTypedError;
+
+ pub fn serialize<S>(
+ e: &Box<dyn StdError + Send + Sync>,
+ serializer: S,
+ ) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let mut state = serializer.serialize_struct("StringTypedError", 2)?;
+ state.serialize_field("message", &format!("{}", e))?;
+
+ // Serialize the source error chain recursively
+ let mut current_source: &dyn StdError = e.deref();
+ let mut sources = Vec::new();
+ while let Some(err) = current_source.source() {
+ sources.push(err.to_string());
+ current_source = err;
+ }
+ state.serialize_field("caused-by", &sources)?;
+ state.end()
+ }
+
+ pub fn deserialize<'de, D>(deserializer: D) -> Result<Box<dyn StdError + Sync + Send>, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ #[derive(Deserialize)]
+ #[serde(rename_all = "kebab-case")]
+ struct ErrorData {
+ message: String,
+ caused_by: Option<Vec<String>>,
+ }
+ let data = ErrorData::deserialize(deserializer)?;
+ let mut prev = None;
+ if let Some(source) = data.caused_by {
+ for e in source.into_iter().rev() {
+ prev = Some(Box::new(StringTypedError {
+ message: e,
+ source: prev,
+ }));
+ }
+ }
+ let e = Box::new(StringTypedError {
+ message: data.message,
+ source: prev,
+ });
+ Ok(e)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::Error;
+
+ #[test]
+ pub fn unknown_kind() {
+ let json = r#"{
+ "kind": "unexpected-kind",
+ "unexpected-content": "test"
+ }"#;
+ let e: Error = serde_json::from_str(&json).unwrap();
+ assert!(matches!(e, Error::Unknown));
+ }
+
+ #[test]
+ pub fn roundtrip() {
+ // Construct an error with context
+ let e = anyhow::anyhow!("E1").context("E2").context("E3");
+ // Convert to a string with contexts.
+ let s1 = format!("{:?}", e);
+ // Convert the error into an `Error`
+ let e: Error = e.into();
+ // Convert that error into JSON
+ let json = serde_json::to_string_pretty(&e).unwrap();
+ // Convert that error back to anyhow
+ let e: anyhow::Error = e.into();
+ let s2 = format!("{:?}", e);
+ assert_eq!(s1, s2);
+
+ // Convert the error back from JSON
+ let e: Error = serde_json::from_str(&json).unwrap();
+ // Convert to back to anyhow
+ let e: anyhow::Error = e.into();
+ let s3 = format!("{:?}", e);
+ assert_eq!(s2, s3);
+
+ assert_eq!(
+ r#"{
+ "kind": "other",
+ "message": "E3",
+ "caused-by": [
+ "E2",
+ "E1"
+ ]
+}"#,
+ json
+ );
+ }
+}
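For reference, a sketch of how the simple error variants above appear on the wire, given the kebab-case, internally tagged serde attributes (assumes `serde_json` is available, as in the crate's own dependencies):

```rust
use cargo_credential::Error;

fn main() {
    // Unit variants carry only the `kind` tag.
    assert_eq!(
        serde_json::to_string(&Error::NotFound).unwrap(),
        r#"{"kind":"not-found"}"#
    );
    assert_eq!(
        serde_json::to_string(&Error::UrlNotSupported).unwrap(),
        r#"{"kind":"url-not-supported"}"#
    );
}
```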
diff --git a/src/tools/cargo/credential/cargo-credential/src/lib.rs b/src/tools/cargo/credential/cargo-credential/src/lib.rs
index c75172242..0fb495ed3 100644
--- a/src/tools/cargo/credential/cargo-credential/src/lib.rs
+++ b/src/tools/cargo/credential/cargo-credential/src/lib.rs
@@ -1,4 +1,4 @@
-//! Helper library for writing Cargo credential processes.
+//! Helper library for writing Cargo credential providers.
//!
//! A credential process should have a `struct` that implements the `Credential` trait.
//! The `main` function should be called with an instance of that struct, such as:
@@ -9,98 +9,270 @@
//! }
//! ```
//!
-//! This will determine the action to perform (get/store/erase) by looking at
-//! the CLI arguments for the first argument that does not start with `-`. It
-//! will then call the corresponding method of the trait to perform the
-//! requested action.
-
-pub type Error = Box<dyn std::error::Error>;
+//! While in the `perform` function, stdin and stdout will be re-attached to the
+//! active console. This allows credential providers to be interactive if necessary.
+//!
+//! ## Error handling
+//! ### [`Error::UrlNotSupported`]
+//! A credential provider may only support some registry URLs. If this is the case
+//! and an unsupported index URL is passed to the provider, it should respond with
+//! [`Error::UrlNotSupported`]. Other credential providers may be attempted by Cargo.
+//!
+//! ### [`Error::NotFound`]
+//! When attempting an [`Action::Get`] or [`Action::Logout`], if a credential can not
+//! be found, the provider should respond with [`Error::NotFound`]. Other credential
+//! providers may be attempted by Cargo.
+//!
+//! ### [`Error::OperationNotSupported`]
+//! A credential provider might not support all operations. For example if the provider
+//! only supports [`Action::Get`], [`Error::OperationNotSupported`] should be returned
+//! for all other requests.
+//!
+//! ### [`Error::Other`]
+//! All other errors go here. The error will be shown to the user in Cargo, including
+//! the full error chain using [`std::error::Error::source`].
+//!
+//! ## Example
+//! ```rust,ignore
+#![doc = include_str!("../examples/file-provider.rs")]
+//! ```
-pub trait Credential {
- /// Returns the name of this credential process.
- fn name(&self) -> &'static str;
+use serde::{Deserialize, Serialize};
+use std::{fmt::Display, io};
+use time::OffsetDateTime;
- /// Retrieves a token for the given registry.
- fn get(&self, index_url: &str) -> Result<String, Error>;
+mod error;
+mod secret;
+mod stdio;
- /// Stores the given token for the given registry.
- fn store(&self, index_url: &str, token: &str, name: Option<&str>) -> Result<(), Error>;
+pub use error::Error;
+pub use secret::Secret;
+use stdio::stdin_stdout_to_console;
- /// Removes the token for the given registry.
- ///
- /// If the user is not logged in, this should print a message to stderr if
- /// possible indicating that the user is not currently logged in, and
- /// return `Ok`.
- fn erase(&self, index_url: &str) -> Result<(), Error>;
+/// Message sent by the credential helper on startup
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub struct CredentialHello {
+ // Protocol versions supported by the credential process.
+ pub v: Vec<u32>,
}
+/// Credential provider that doesn't support any registries.
pub struct UnsupportedCredential;
-
impl Credential for UnsupportedCredential {
- fn name(&self) -> &'static str {
- "unsupported"
+ fn perform(
+ &self,
+ _registry: &RegistryInfo,
+ _action: &Action,
+ _args: &[&str],
+ ) -> Result<CredentialResponse, Error> {
+ Err(Error::UrlNotSupported)
}
+}
- fn get(&self, _index_url: &str) -> Result<String, Error> {
- Err("unsupported".into())
- }
+/// Message sent by Cargo to the credential helper after the hello
+#[derive(Serialize, Deserialize, Clone, Debug)]
+#[serde(rename_all = "kebab-case")]
+pub struct CredentialRequest<'a> {
+ // Cargo will respond with the highest common protocol supported by both.
+ pub v: u32,
+ #[serde(borrow)]
+ pub registry: RegistryInfo<'a>,
+ #[serde(borrow, flatten)]
+ pub action: Action<'a>,
+ /// Additional command-line arguments passed to the credential provider.
+ pub args: Vec<&'a str>,
+}
- fn store(&self, _index_url: &str, _token: &str, _name: Option<&str>) -> Result<(), Error> {
- Err("unsupported".into())
- }
+#[derive(Serialize, Deserialize, Clone, Debug)]
+#[serde(rename_all = "kebab-case")]
+pub struct RegistryInfo<'a> {
+ /// Registry index url
+ pub index_url: &'a str,
+ /// Name of the registry in configuration. May not be available.
+ /// The crates.io registry will be `crates-io` (`CRATES_IO_REGISTRY`).
+ pub name: Option<&'a str>,
+ /// Headers from attempting to access a registry that resulted in a HTTP 401.
+ #[serde(skip_serializing_if = "Vec::is_empty", default)]
+ pub headers: Vec<String>,
+}
- fn erase(&self, _index_url: &str) -> Result<(), Error> {
- Err("unsupported".into())
+#[derive(Serialize, Deserialize, Clone, Debug)]
+#[non_exhaustive]
+#[serde(tag = "kind", rename_all = "kebab-case")]
+pub enum Action<'a> {
+ #[serde(borrow)]
+ Get(Operation<'a>),
+ Login(LoginOptions<'a>),
+ Logout,
+ #[serde(other)]
+ Unknown,
+}
+
+impl<'a> Display for Action<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Action::Get(_) => f.write_str("get"),
+ Action::Login(_) => f.write_str("login"),
+ Action::Logout => f.write_str("logout"),
+ Action::Unknown => f.write_str("<unknown>"),
+ }
}
}
-/// Runs the credential interaction by processing the command-line and
-/// environment variables.
+#[derive(Serialize, Deserialize, Clone, Debug)]
+#[serde(rename_all = "kebab-case")]
+pub struct LoginOptions<'a> {
+ /// Token passed on the command line via --token or from stdin
+ pub token: Option<Secret<&'a str>>,
+ /// Optional URL that the user can visit to log in to the registry
+ pub login_url: Option<&'a str>,
+}
+
+/// A record of what kind of operation is happening that we should generate a token for.
+#[derive(Serialize, Deserialize, Clone, Debug)]
+#[non_exhaustive]
+#[serde(tag = "operation", rename_all = "kebab-case")]
+pub enum Operation<'a> {
+ /// The user is attempting to fetch a crate.
+ Read,
+ /// The user is attempting to publish a crate.
+ Publish {
+ /// The name of the crate
+ name: &'a str,
+ /// The version of the crate
+ vers: &'a str,
+ /// The checksum of the crate file being uploaded
+ cksum: &'a str,
+ },
+ /// The user is attempting to yank a crate.
+ Yank {
+ /// The name of the crate
+ name: &'a str,
+ /// The version of the crate
+ vers: &'a str,
+ },
+ /// The user is attempting to unyank a crate.
+ Unyank {
+ /// The name of the crate
+ name: &'a str,
+ /// The version of the crate
+ vers: &'a str,
+ },
+ /// The user is attempting to modify the owners of a crate.
+ Owners {
+ /// The name of the crate
+ name: &'a str,
+ },
+ #[serde(other)]
+ Unknown,
+}
+
+/// Message sent by the credential helper
+#[derive(Serialize, Deserialize, Clone, Debug)]
+#[serde(tag = "kind", rename_all = "kebab-case")]
+#[non_exhaustive]
+pub enum CredentialResponse {
+ Get {
+ token: Secret<String>,
+ cache: CacheControl,
+ operation_independent: bool,
+ },
+ Login,
+ Logout,
+ #[serde(other)]
+ Unknown,
+}
+
+#[derive(Serialize, Deserialize, Clone, Debug)]
+#[serde(rename_all = "kebab-case")]
+#[non_exhaustive]
+pub enum CacheControl {
+ /// Do not cache this result.
+ Never,
+ /// Cache this result and use it for subsequent requests in the current Cargo invocation until the specified time.
+ Expires(#[serde(with = "time::serde::timestamp")] OffsetDateTime),
+ /// Cache this result and use it for all subsequent requests in the current Cargo invocation.
+ Session,
+ #[serde(other)]
+ Unknown,
+}
+
+/// Credential process JSON protocol version. Incrementing
+/// this version will prevent new credential providers
+/// from working with older versions of Cargo.
+pub const PROTOCOL_VERSION_1: u32 = 1;
+pub trait Credential {
+ /// Retrieves a token for the given registry.
+ fn perform(
+ &self,
+ registry: &RegistryInfo,
+ action: &Action,
+ args: &[&str],
+ ) -> Result<CredentialResponse, Error>;
+}
+
+/// Runs the credential interaction
pub fn main(credential: impl Credential) {
- let name = credential.name();
- if let Err(e) = doit(credential) {
- eprintln!("{} error: {}", name, e);
- std::process::exit(1);
+ let result = doit(credential).map_err(|e| Error::Other(e));
+ if result.is_err() {
+ serde_json::to_writer(std::io::stdout(), &result)
+ .expect("failed to serialize credential provider error");
+ println!();
}
}
-fn env(name: &str) -> Result<String, Error> {
- std::env::var(name).map_err(|_| format!("environment variable `{}` is not set", name).into())
-}
+fn doit(
+ credential: impl Credential,
+) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
+ let hello = CredentialHello {
+ v: vec![PROTOCOL_VERSION_1],
+ };
+ serde_json::to_writer(std::io::stdout(), &hello)?;
+ println!();
-fn doit(credential: impl Credential) -> Result<(), Error> {
- let which = std::env::args()
- .skip(1)
- .skip_while(|arg| arg.starts_with('-'))
- .next()
- .ok_or_else(|| "first argument must be the {action}")?;
- let index_url = env("CARGO_REGISTRY_INDEX_URL")?;
- let name = std::env::var("CARGO_REGISTRY_NAME_OPT").ok();
- let result = match which.as_ref() {
- "get" => credential.get(&index_url).and_then(|token| {
- println!("{}", token);
- Ok(())
- }),
- "store" => {
- read_token().and_then(|token| credential.store(&index_url, &token, name.as_deref()))
+ loop {
+ let mut buffer = String::new();
+ let len = std::io::stdin().read_line(&mut buffer)?;
+ if len == 0 {
+ return Ok(());
}
- "erase" => credential.erase(&index_url),
- _ => {
- return Err(format!(
- "unexpected command-line argument `{}`, expected get/store/erase",
- which
- )
- .into())
+ let request: CredentialRequest = serde_json::from_str(&buffer)?;
+ if request.v != PROTOCOL_VERSION_1 {
+ return Err(format!("unsupported protocol version {}", request.v).into());
}
- };
- result.map_err(|e| format!("failed to `{}` token: {}", which, e).into())
+
+ let response = stdin_stdout_to_console(|| {
+ credential.perform(&request.registry, &request.action, &request.args)
+ })?;
+
+ serde_json::to_writer(std::io::stdout(), &response)?;
+ println!();
+ }
}
-fn read_token() -> Result<String, Error> {
- let mut buffer = String::new();
- std::io::stdin().read_line(&mut buffer)?;
- if buffer.ends_with('\n') {
- buffer.pop();
+/// Read a line of text from stdin.
+pub fn read_line() -> Result<String, io::Error> {
+ let mut buf = String::new();
+ io::stdin().read_line(&mut buf)?;
+ Ok(buf.trim().to_string())
+}
+
+/// Prompt the user for a token.
+pub fn read_token(
+ login_options: &LoginOptions,
+ registry: &RegistryInfo,
+) -> Result<Secret<String>, Error> {
+ if let Some(token) = &login_options.token {
+ return Ok(token.to_owned());
}
- Ok(buffer)
+
+ if let Some(url) = login_options.login_url {
+ eprintln!("please paste the token found on {url} below");
+ } else if let Some(name) = registry.name {
+ eprintln!("please paste the token for {name} below");
+ } else {
+ eprintln!("please paste the token for {} below", registry.index_url);
+ }
+
+ Ok(Secret::from(read_line().map_err(Box::new)?))
}
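To make the line-oriented JSON protocol above concrete, here is a sketch of serializing one `get` request with the new types; the URL and registry name mirror the test fixtures later in this diff, and `serde_json` is assumed to be available:

```rust
use cargo_credential::{Action, CredentialRequest, Operation, RegistryInfo, PROTOCOL_VERSION_1};

fn main() {
    let request = CredentialRequest {
        v: PROTOCOL_VERSION_1,
        registry: RegistryInfo {
            index_url: "sparse+https://test/",
            name: Some("alternative"),
            headers: Vec::new(), // empty headers are skipped during serialization
        },
        action: Action::Get(Operation::Read),
        args: Vec::new(),
    };
    // One JSON object per line, roughly (field order may vary):
    // {"v":1,"registry":{"index-url":"sparse+https://test/","name":"alternative"},"kind":"get","operation":"read","args":[]}
    println!("{}", serde_json::to_string(&request).unwrap());
}
```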
diff --git a/src/tools/cargo/credential/cargo-credential/src/secret.rs b/src/tools/cargo/credential/cargo-credential/src/secret.rs
new file mode 100644
index 000000000..1c2314d8e
--- /dev/null
+++ b/src/tools/cargo/credential/cargo-credential/src/secret.rs
@@ -0,0 +1,101 @@
+use std::fmt;
+use std::ops::Deref;
+
+use serde::{Deserialize, Serialize};
+
+/// A wrapper for values that should not be printed.
+///
+/// This type does not implement `Display`, and has a `Debug` impl that hides
+/// the contained value.
+///
+/// ```
+/// # use cargo_credential::Secret;
+/// let token = Secret::from("super secret string");
+/// assert_eq!(format!("{:?}", token), "Secret { inner: \"REDACTED\" }");
+/// ```
+///
+/// Currently, we write a borrowed `Secret<T>` as `Secret<&T>`.
+/// The [`as_deref`](Secret::as_deref) and [`to_owned`](Secret::to_owned) methods can
+/// be used to convert back and forth between `Secret<String>` and `Secret<&str>`.
+#[derive(Default, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(transparent)]
+pub struct Secret<T> {
+ inner: T,
+}
+
+impl<T> Secret<T> {
+ /// Unwraps the contained value.
+ ///
+ /// Use of this method marks the boundary of where the contained value is
+ /// hidden.
+ pub fn expose(self) -> T {
+ self.inner
+ }
+
+ /// Converts a `Secret<T>` to a `Secret<&T::Target>`.
+ /// ```
+ /// # use cargo_credential::Secret;
+ /// let owned: Secret<String> = Secret::from(String::from("token"));
+ /// let borrowed: Secret<&str> = owned.as_deref();
+ /// ```
+ pub fn as_deref(&self) -> Secret<&<T as Deref>::Target>
+ where
+ T: Deref,
+ {
+ Secret::from(self.inner.deref())
+ }
+
+ /// Converts a `Secret<T>` to a `Secret<&T>`.
+ pub fn as_ref(&self) -> Secret<&T> {
+ Secret::from(&self.inner)
+ }
+
+ /// Converts a `Secret<T>` to a `Secret<U>` by applying `f` to the contained value.
+ pub fn map<U, F>(self, f: F) -> Secret<U>
+ where
+ F: FnOnce(T) -> U,
+ {
+ Secret::from(f(self.inner))
+ }
+}
+
+impl<T: ToOwned + ?Sized> Secret<&T> {
+ /// Converts a `Secret` containing a borrowed type to a `Secret` containing the
+ /// corresponding owned type.
+ /// ```
+ /// # use cargo_credential::Secret;
+ /// let borrowed: Secret<&str> = Secret::from("token");
+ /// let owned: Secret<String> = borrowed.to_owned();
+ /// ```
+ pub fn to_owned(&self) -> Secret<<T as ToOwned>::Owned> {
+ Secret::from(self.inner.to_owned())
+ }
+}
+
+impl<T, E> Secret<Result<T, E>> {
+ /// Converts a `Secret<Result<T, E>>` to a `Result<Secret<T>, E>`.
+ pub fn transpose(self) -> Result<Secret<T>, E> {
+ self.inner.map(|v| Secret::from(v))
+ }
+}
+
+impl<T: AsRef<str>> Secret<T> {
+ /// Checks if the contained value is empty.
+ pub fn is_empty(&self) -> bool {
+ self.inner.as_ref().is_empty()
+ }
+}
+
+impl<T> From<T> for Secret<T> {
+ fn from(inner: T) -> Self {
+ Self { inner }
+ }
+}
+
+impl<T> fmt::Debug for Secret<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Secret")
+ .field("inner", &"REDACTED")
+ .finish()
+ }
+}
diff --git a/src/tools/cargo/credential/cargo-credential/src/stdio.rs b/src/tools/cargo/credential/cargo-credential/src/stdio.rs
new file mode 100644
index 000000000..25435056f
--- /dev/null
+++ b/src/tools/cargo/credential/cargo-credential/src/stdio.rs
@@ -0,0 +1,163 @@
+use std::{fs::File, io::Error};
+
+/// Reset stdin and stdout to the attached console / tty for the duration of the closure.
+/// If no console is available, stdin and stdout will be redirected to null.
+pub fn stdin_stdout_to_console<F, T>(f: F) -> Result<T, Error>
+where
+ F: FnOnce() -> T,
+{
+ let open_write = |f| std::fs::OpenOptions::new().write(true).open(f);
+
+ let mut stdin = File::open(imp::IN_DEVICE).or_else(|_| File::open(imp::NULL_DEVICE))?;
+ let mut stdout = open_write(imp::OUT_DEVICE).or_else(|_| open_write(imp::NULL_DEVICE))?;
+
+ let _stdin_guard = imp::ReplacementGuard::new(Stdio::Stdin, &mut stdin)?;
+ let _stdout_guard = imp::ReplacementGuard::new(Stdio::Stdout, &mut stdout)?;
+ Ok(f())
+}
+
+enum Stdio {
+ Stdin,
+ Stdout,
+}
+
+#[cfg(windows)]
+mod imp {
+ use super::Stdio;
+ use std::{fs::File, io::Error, os::windows::prelude::AsRawHandle};
+ use windows_sys::Win32::{
+ Foundation::{HANDLE, INVALID_HANDLE_VALUE},
+ System::Console::{
+ GetStdHandle, SetStdHandle, STD_HANDLE, STD_INPUT_HANDLE, STD_OUTPUT_HANDLE,
+ },
+ };
+ pub const OUT_DEVICE: &str = "CONOUT$";
+ pub const IN_DEVICE: &str = "CONIN$";
+ pub const NULL_DEVICE: &str = "NUL";
+
+ /// Restores previous stdio when dropped.
+ pub struct ReplacementGuard {
+ std_handle: STD_HANDLE,
+ previous: HANDLE,
+ }
+
+ impl ReplacementGuard {
+ pub(super) fn new(stdio: Stdio, replacement: &mut File) -> Result<ReplacementGuard, Error> {
+ let std_handle = match stdio {
+ Stdio::Stdin => STD_INPUT_HANDLE,
+ Stdio::Stdout => STD_OUTPUT_HANDLE,
+ };
+
+ let previous;
+ unsafe {
+ // Make a copy of the current handle
+ previous = GetStdHandle(std_handle);
+ if previous == INVALID_HANDLE_VALUE {
+ return Err(std::io::Error::last_os_error());
+ }
+
+ // Replace stdin with the replacement handle
+ if SetStdHandle(std_handle, replacement.as_raw_handle() as HANDLE) == 0 {
+ return Err(std::io::Error::last_os_error());
+ }
+ }
+
+ Ok(ReplacementGuard {
+ previous,
+ std_handle,
+ })
+ }
+ }
+
+ impl Drop for ReplacementGuard {
+ fn drop(&mut self) {
+ unsafe {
+ // Put previous handle back in to stdin
+ SetStdHandle(self.std_handle, self.previous);
+ }
+ }
+ }
+}
+
+#[cfg(unix)]
+mod imp {
+ use super::Stdio;
+ use libc::{close, dup, dup2, STDIN_FILENO, STDOUT_FILENO};
+ use std::{fs::File, io::Error, os::fd::AsRawFd};
+ pub const IN_DEVICE: &str = "/dev/tty";
+ pub const OUT_DEVICE: &str = "/dev/tty";
+ pub const NULL_DEVICE: &str = "/dev/null";
+
+ /// Restores previous stdio when dropped.
+ pub struct ReplacementGuard {
+ std_fileno: i32,
+ previous: i32,
+ }
+
+ impl ReplacementGuard {
+ pub(super) fn new(stdio: Stdio, replacement: &mut File) -> Result<ReplacementGuard, Error> {
+ let std_fileno = match stdio {
+ Stdio::Stdin => STDIN_FILENO,
+ Stdio::Stdout => STDOUT_FILENO,
+ };
+
+ let previous;
+ unsafe {
+ // Duplicate the existing stdin file to a new descriptor
+ previous = dup(std_fileno);
+ if previous == -1 {
+ return Err(std::io::Error::last_os_error());
+ }
+ // Replace stdin with the replacement file
+ if dup2(replacement.as_raw_fd(), std_fileno) == -1 {
+ return Err(std::io::Error::last_os_error());
+ }
+ }
+
+ Ok(ReplacementGuard {
+ previous,
+ std_fileno,
+ })
+ }
+ }
+
+ impl Drop for ReplacementGuard {
+ fn drop(&mut self) {
+ unsafe {
+ // Put previous file back in to stdin
+ dup2(self.previous, self.std_fileno);
+ // Close the file descriptor we used as a backup
+ close(self.previous);
+ }
+ }
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use std::fs::OpenOptions;
+ use std::io::{Seek, Write};
+
+ use super::imp::ReplacementGuard;
+ use super::Stdio;
+
+ #[test]
+ fn stdin() {
+ let tempdir = snapbox::path::PathFixture::mutable_temp().unwrap();
+ let file = tempdir.path().unwrap().join("stdin");
+ let mut file = OpenOptions::new()
+ .read(true)
+ .write(true)
+ .create(true)
+ .open(file)
+ .unwrap();
+
+ writeln!(&mut file, "hello").unwrap();
+ file.seek(std::io::SeekFrom::Start(0)).unwrap();
+ {
+ let _guard = ReplacementGuard::new(Stdio::Stdin, &mut file).unwrap();
+ let line = std::io::stdin().lines().next().unwrap().unwrap();
+ assert_eq!(line, "hello");
+ }
+ }
+}
diff --git a/src/tools/cargo/credential/cargo-credential/tests/examples.rs b/src/tools/cargo/credential/cargo-credential/tests/examples.rs
new file mode 100644
index 000000000..87fdb8de3
--- /dev/null
+++ b/src/tools/cargo/credential/cargo-credential/tests/examples.rs
@@ -0,0 +1,45 @@
+use std::path::Path;
+
+use snapbox::cmd::Command;
+
+#[test]
+fn stdout_redirected() {
+ let bin = snapbox::cmd::compile_example("stdout-redirected", []).unwrap();
+
+ let hello = r#"{"v":[1]}"#;
+ let get_request = r#"{"v": 1, "registry": {"index-url":"sparse+https://test/","name":"alternative"},"kind": "get","operation": "read","args": []}"#;
+ let err_not_supported = r#"{"Err":{"kind":"operation-not-supported"}}"#;
+
+ Command::new(bin)
+ .stdin(format!("{get_request}\n"))
+ .arg("--cargo-plugin")
+ .assert()
+ .stdout_eq(format!("{hello}\n{err_not_supported}\n"))
+ .stderr_eq("message on stderr should be sent the the parent process\n")
+ .success();
+}
+
+#[test]
+fn file_provider() {
+ let bin = snapbox::cmd::compile_example("file-provider", []).unwrap();
+
+ let hello = r#"{"v":[1]}"#;
+ let login_request = r#"{"v": 1,"registry": {"index-url":"https://github.com/rust-lang/crates.io-index","name":"crates-io"},"kind": "login","token": "s3krit","args": []}"#;
+ let login_response = r#"{"Ok":{"kind":"login"}}"#;
+
+ let get_request = r#"{"v": 1,"registry": {"index-url":"https://github.com/rust-lang/crates.io-index","name":"crates-io"},"kind": "get","operation": "read","args": []}"#;
+ let get_response =
+ r#"{"Ok":{"kind":"get","token":"s3krit","cache":"session","operation_independent":true}}"#;
+
+ let dir = Path::new(env!("CARGO_TARGET_TMPDIR")).join("cargo-credential-tests");
+ std::fs::create_dir(&dir).unwrap();
+ Command::new(bin)
+ .current_dir(&dir)
+ .stdin(format!("{login_request}\n{get_request}\n"))
+ .arg("--cargo-plugin")
+ .assert()
+ .stdout_eq(format!("{hello}\n{login_response}\n{get_response}\n"))
+ .stderr_eq("")
+ .success();
+ std::fs::remove_dir_all(&dir).unwrap();
+}
diff --git a/src/tools/cargo/deny.toml b/src/tools/cargo/deny.toml
index 89d08eacc..383648171 100644
--- a/src/tools/cargo/deny.toml
+++ b/src/tools/cargo/deny.toml
@@ -109,6 +109,7 @@ allow = [
"MPL-2.0",
"Unicode-DFS-2016",
"CC0-1.0",
+ "ISC",
]
# List of explicitly disallowed licenses
# See https://spdx.org/licenses/ for list of possible licenses
diff --git a/src/tools/cargo/publish.py b/src/tools/cargo/publish.py
index 5ace18f72..13077a69b 100755
--- a/src/tools/cargo/publish.py
+++ b/src/tools/cargo/publish.py
@@ -1,6 +1,12 @@
#!/usr/bin/env python3
# This script is used to publish Cargo to crates.io.
+#
+# This is run automatically every 6 weeks by the Release team's automation
+# whose source is at https://github.com/rust-lang/simpleinfra/.
+#
+# See https://doc.crates.io/contrib/process/release.html for more about
+# Cargo's release process.
import os
import re
diff --git a/src/tools/cargo/src/bin/cargo/cli.rs b/src/tools/cargo/src/bin/cargo/cli.rs
index db52bc8f2..eb337d681 100644
--- a/src/tools/cargo/src/bin/cargo/cli.rs
+++ b/src/tools/cargo/src/bin/cargo/cli.rs
@@ -518,12 +518,14 @@ pub fn cli() -> Command {
let usage = if is_rustup {
"cargo [+toolchain] [OPTIONS] [COMMAND]\n cargo [+toolchain] [OPTIONS] -Zscript <MANIFEST_RS> [ARGS]..."
} else {
- "cargo [OPTIONS] [COMMAND]\n cargo [OPTIONS] -Zscript <MANIFEST> [ARGS]..."
+ "cargo [OPTIONS] [COMMAND]\n cargo [OPTIONS] -Zscript <MANIFEST_RS> [ARGS]..."
};
Command::new("cargo")
// Subcommands all count their args' display order independently (from 0),
// which makes their args interspersed with global args. This puts global args last.
- .next_display_order(1000)
+ //
+ // We also want these to come before auto-generated `--help`
+ .next_display_order(800)
.allow_external_subcommands(true)
// Doesn't mix well with our list of common cargo commands. See clap-rs/clap#3108 for
// opening clap up to allow us to style our help template
@@ -586,9 +588,21 @@ See 'cargo help <command>' for more information on a specific command.\n",
.value_hint(clap::ValueHint::DirPath)
.value_parser(clap::builder::ValueParser::path_buf()),
)
- .arg(flag("frozen", "Require Cargo.lock and cache are up to date").global(true))
- .arg(flag("locked", "Require Cargo.lock is up to date").global(true))
- .arg(flag("offline", "Run without accessing the network").global(true))
+ .arg(
+ flag("frozen", "Require Cargo.lock and cache are up to date")
+ .help_heading(heading::MANIFEST_OPTIONS)
+ .global(true),
+ )
+ .arg(
+ flag("locked", "Require Cargo.lock is up to date")
+ .help_heading(heading::MANIFEST_OPTIONS)
+ .global(true),
+ )
+ .arg(
+ flag("offline", "Run without accessing the network")
+ .help_heading(heading::MANIFEST_OPTIONS)
+ .global(true),
+ )
.arg(multi_opt("config", "KEY=VALUE", "Override a configuration value").global(true))
.arg(
Arg::new("unstable-features")
diff --git a/src/tools/cargo/src/bin/cargo/commands/add.rs b/src/tools/cargo/src/bin/cargo/commands/add.rs
index 52fc38b74..56df76268 100644
--- a/src/tools/cargo/src/bin/cargo/commands/add.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/add.rs
@@ -79,8 +79,8 @@ Example uses:
])
.arg_manifest_path()
.arg_package("Package to modify")
- .arg_quiet()
.arg_dry_run("Don't actually write the manifest")
+ .arg_quiet()
.next_help_heading("Source")
.args([
clap::Arg::new("path")
diff --git a/src/tools/cargo/src/bin/cargo/commands/bench.rs b/src/tools/cargo/src/bin/cargo/commands/bench.rs
index 3739d880e..bb2c193b0 100644
--- a/src/tools/cargo/src/bin/cargo/commands/bench.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/bench.rs
@@ -4,7 +4,7 @@ use cargo::ops::{self, TestOptions};
pub fn cli() -> Command {
subcommand("bench")
.about("Execute all benchmarks of a local package")
- .arg_quiet()
+ .next_display_order(0)
.arg(
Arg::new("BENCHNAME")
.action(ArgAction::Set)
@@ -16,6 +16,19 @@ pub fn cli() -> Command {
.num_args(0..)
.last(true),
)
+ .arg(flag("no-run", "Compile, but don't run benchmarks"))
+ .arg(flag(
+ "no-fail-fast",
+ "Run all benchmarks regardless of failure",
+ ))
+ .arg_ignore_rust_version()
+ .arg_message_format()
+ .arg_quiet()
+ .arg_package_spec(
+ "Package to run benchmarks for",
+ "Benchmark all packages in the workspace",
+ "Exclude packages from the benchmark",
+ )
.arg_targets_all(
"Benchmark only this package's library",
"Benchmark only the specified binary",
@@ -28,31 +41,31 @@ pub fn cli() -> Command {
"Benchmark all benches",
"Benchmark all targets",
)
- .arg(flag("no-run", "Compile, but don't run benchmarks"))
- .arg_package_spec(
- "Package to run benchmarks for",
- "Benchmark all packages in the workspace",
- "Exclude packages from the benchmark",
- )
- .arg_jobs()
- .arg_profile("Build artifacts with the specified profile")
.arg_features()
+ .arg_jobs_without_keep_going()
+ .arg(flag("keep-going", "Use `--no-fail-fast` instead").hide(true)) // See rust-lang/cargo#11702
+ .arg_profile("Build artifacts with the specified profile")
.arg_target_triple("Build for the target triple")
.arg_target_dir()
- .arg_manifest_path()
- .arg_ignore_rust_version()
- .arg_message_format()
- .arg(flag(
- "no-fail-fast",
- "Run all benchmarks regardless of failure",
- ))
.arg_unit_graph()
.arg_timings()
+ .arg_manifest_path()
.after_help("Run `cargo help bench` for more detailed information.\n")
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
+
+ if args.keep_going() {
+ return Err(anyhow::format_err!(
+ "\
+unexpected argument `--keep-going` found
+
+ tip: to run as many benchmarks as possible without failing fast, use `--no-fail-fast`"
+ )
+ .into());
+ }
+
let mut compile_opts = args.compile_options(
config,
CompileMode::Bench,
diff --git a/src/tools/cargo/src/bin/cargo/commands/build.rs b/src/tools/cargo/src/bin/cargo/commands/build.rs
index a78da38a4..e25638aa0 100644
--- a/src/tools/cargo/src/bin/cargo/commands/build.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/build.rs
@@ -7,13 +7,15 @@ pub fn cli() -> Command {
// subcommand aliases are handled in aliased_command()
// .alias("b")
.about("Compile a local package and all of its dependencies")
+ .arg_ignore_rust_version()
+ .arg_future_incompat_report()
+ .arg_message_format()
.arg_quiet()
.arg_package_spec(
"Package to build (see `cargo help pkgid`)",
"Build all packages in the workspace",
"Exclude packages from the build",
)
- .arg_jobs()
.arg_targets_all(
"Build only this package's library",
"Build only the specified binary",
@@ -26,9 +28,10 @@ pub fn cli() -> Command {
"Build all benches",
"Build all targets",
)
+ .arg_features()
.arg_release("Build artifacts in release mode, with optimizations")
.arg_profile("Build artifacts with the specified profile")
- .arg_features()
+ .arg_jobs()
.arg_target_triple("Build for the target triple")
.arg_target_dir()
.arg(
@@ -36,15 +39,13 @@ pub fn cli() -> Command {
"out-dir",
"Copy final artifacts to this directory (unstable)",
)
- .value_name("PATH"),
+ .value_name("PATH")
+ .help_heading(heading::COMPILATION_OPTIONS),
)
- .arg_manifest_path()
- .arg_ignore_rust_version()
- .arg_message_format()
.arg_build_plan()
.arg_unit_graph()
- .arg_future_incompat_report()
.arg_timings()
+ .arg_manifest_path()
.after_help("Run `cargo help build` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/check.rs b/src/tools/cargo/src/bin/cargo/commands/check.rs
index c9f6e0b38..ab6f99048 100644
--- a/src/tools/cargo/src/bin/cargo/commands/check.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/check.rs
@@ -7,13 +7,15 @@ pub fn cli() -> Command {
// subcommand aliases are handled in aliased_command()
// .alias("c")
.about("Check a local package and all of its dependencies for errors")
+ .arg_ignore_rust_version()
+ .arg_future_incompat_report()
+ .arg_message_format()
.arg_quiet()
.arg_package_spec(
"Package(s) to check",
"Check all packages in the workspace",
"Exclude packages from the check",
)
- .arg_jobs()
.arg_targets_all(
"Check only this package's library",
"Check only the specified binary",
@@ -26,17 +28,15 @@ pub fn cli() -> Command {
"Check all benches",
"Check all targets",
)
+ .arg_features()
+ .arg_jobs()
.arg_release("Check artifacts in release mode, with optimizations")
.arg_profile("Check artifacts with the specified profile")
- .arg_features()
.arg_target_triple("Check for the target triple")
.arg_target_dir()
- .arg_manifest_path()
- .arg_ignore_rust_version()
- .arg_message_format()
.arg_unit_graph()
- .arg_future_incompat_report()
.arg_timings()
+ .arg_manifest_path()
.after_help("Run `cargo help check` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/clean.rs b/src/tools/cargo/src/bin/cargo/commands/clean.rs
index 162461c47..9fa3c8527 100644
--- a/src/tools/cargo/src/bin/cargo/commands/clean.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/clean.rs
@@ -6,14 +6,14 @@ use cargo::util::print_available_packages;
pub fn cli() -> Command {
subcommand("clean")
.about("Remove artifacts that cargo has generated in the past")
+ .arg_doc("Whether or not to clean just the documentation directory")
.arg_quiet()
.arg_package_spec_simple("Package to clean artifacts for")
- .arg_manifest_path()
- .arg_target_triple("Target triple to clean output for")
- .arg_target_dir()
.arg_release("Whether or not to clean release artifacts")
.arg_profile("Clean artifacts of the specified profile")
- .arg_doc("Whether or not to clean just the documentation directory")
+ .arg_target_triple("Target triple to clean output for")
+ .arg_target_dir()
+ .arg_manifest_path()
.after_help("Run `cargo help clean` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/doc.rs b/src/tools/cargo/src/bin/cargo/commands/doc.rs
index 932058afb..c3dfe426d 100644
--- a/src/tools/cargo/src/bin/cargo/commands/doc.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/doc.rs
@@ -7,22 +7,24 @@ pub fn cli() -> Command {
// subcommand aliases are handled in aliased_command()
// .alias("d")
.about("Build a package's documentation")
- .arg_quiet()
.arg(flag(
"open",
"Opens the docs in a browser after the operation",
))
- .arg_package_spec(
- "Package to document",
- "Document all packages in the workspace",
- "Exclude packages from the build",
- )
.arg(flag(
"no-deps",
"Don't build documentation for dependencies",
))
.arg(flag("document-private-items", "Document private items"))
- .arg_jobs()
+ .arg_ignore_rust_version()
+ .arg_message_format()
+ .arg_quiet()
+ .arg_package_spec(
+ "Package to document",
+ "Document all packages in the workspace",
+ "Exclude packages from the build",
+ )
+ .arg_features()
.arg_targets_lib_bin_example(
"Document only this package's library",
"Document only the specified binary",
@@ -30,16 +32,14 @@ pub fn cli() -> Command {
"Document only the specified example",
"Document all examples",
)
+ .arg_jobs()
.arg_release("Build artifacts in release mode, with optimizations")
.arg_profile("Build artifacts with the specified profile")
- .arg_features()
.arg_target_triple("Build for the target triple")
.arg_target_dir()
- .arg_manifest_path()
- .arg_message_format()
- .arg_ignore_rust_version()
.arg_unit_graph()
.arg_timings()
+ .arg_manifest_path()
.after_help("Run `cargo help doc` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/fetch.rs b/src/tools/cargo/src/bin/cargo/commands/fetch.rs
index 2fbbc478c..4b1fcb40f 100644
--- a/src/tools/cargo/src/bin/cargo/commands/fetch.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/fetch.rs
@@ -7,8 +7,8 @@ pub fn cli() -> Command {
subcommand("fetch")
.about("Fetch dependencies of a package from the network")
.arg_quiet()
- .arg_manifest_path()
.arg_target_triple("Fetch dependencies for the target triple")
+ .arg_manifest_path()
.after_help("Run `cargo help fetch` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/fix.rs b/src/tools/cargo/src/bin/cargo/commands/fix.rs
index 5238d5852..1f98dd67e 100644
--- a/src/tools/cargo/src/bin/cargo/commands/fix.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/fix.rs
@@ -5,13 +5,35 @@ use cargo::ops;
pub fn cli() -> Command {
subcommand("fix")
.about("Automatically fix lint warnings reported by rustc")
+ .arg(flag("edition", "Fix in preparation for the next edition"))
+ .arg(flag(
+ "edition-idioms",
+ "Fix warnings to migrate to the idioms of an edition",
+ ))
+ .arg(flag(
+ "broken-code",
+ "Fix code even if it already has compiler errors",
+ ))
+ .arg(flag(
+ "allow-no-vcs",
+ "Fix code even if a VCS was not detected",
+ ))
+ .arg(flag(
+ "allow-dirty",
+ "Fix code even if the working directory is dirty",
+ ))
+ .arg(flag(
+ "allow-staged",
+ "Fix code even if the working directory has staged changes",
+ ))
+ .arg_ignore_rust_version()
+ .arg_message_format()
.arg_quiet()
.arg_package_spec(
"Package(s) to fix",
"Fix all packages in the workspace",
"Exclude packages from the fixes",
)
- .arg_jobs()
.arg_targets_all(
"Fix only this package's library",
"Fix only the specified binary",
@@ -24,36 +46,14 @@ pub fn cli() -> Command {
"Fix all benches",
"Fix all targets (default)",
)
+ .arg_features()
+ .arg_jobs()
.arg_release("Fix artifacts in release mode, with optimizations")
.arg_profile("Build artifacts with the specified profile")
- .arg_features()
.arg_target_triple("Fix for the target triple")
.arg_target_dir()
- .arg_manifest_path()
- .arg_message_format()
- .arg(flag(
- "broken-code",
- "Fix code even if it already has compiler errors",
- ))
- .arg(flag("edition", "Fix in preparation for the next edition"))
- .arg(flag(
- "edition-idioms",
- "Fix warnings to migrate to the idioms of an edition",
- ))
- .arg(flag(
- "allow-no-vcs",
- "Fix code even if a VCS was not detected",
- ))
- .arg(flag(
- "allow-dirty",
- "Fix code even if the working directory is dirty",
- ))
- .arg(flag(
- "allow-staged",
- "Fix code even if the working directory has staged changes",
- ))
- .arg_ignore_rust_version()
.arg_timings()
+ .arg_manifest_path()
.after_help("Run `cargo help fix` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/init.rs b/src/tools/cargo/src/bin/cargo/commands/init.rs
index b280d4fe4..fdb3dc208 100644
--- a/src/tools/cargo/src/bin/cargo/commands/init.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/init.rs
@@ -5,10 +5,10 @@ use cargo::ops;
pub fn cli() -> Command {
subcommand("init")
.about("Create a new cargo package in an existing directory")
- .arg_quiet()
.arg(Arg::new("path").action(ArgAction::Set).default_value("."))
- .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
.arg_new_opts()
+ .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+ .arg_quiet()
.after_help("Run `cargo help init` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/install.rs b/src/tools/cargo/src/bin/cargo/commands/install.rs
index 3bb90c2d5..8abb00190 100644
--- a/src/tools/cargo/src/bin/cargo/commands/install.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/install.rs
@@ -10,7 +10,6 @@ use cargo_util::paths;
pub fn cli() -> Command {
subcommand("install")
.about("Install a Rust binary. Default location is $HOME/.cargo/bin")
- .arg_quiet()
.arg(
Arg::new("crate")
.value_parser(clap::builder::NonEmptyStringValueParser::new())
@@ -23,6 +22,18 @@ pub fn cli() -> Command {
.requires("crate"),
)
.arg(
+ opt("index", "Registry index to install from")
+ .value_name("INDEX")
+ .requires("crate")
+ .conflicts_with_all(&["git", "path", "registry"]),
+ )
+ .arg(
+ opt("registry", "Registry to use")
+ .value_name("REGISTRY")
+ .requires("crate")
+ .conflicts_with_all(&["git", "path", "index"]),
+ )
+ .arg(
opt("git", "Git URL to install the specified crate from")
.value_name("URL")
.conflicts_with_all(&["path", "index", "registry"]),
@@ -47,42 +58,31 @@ pub fn cli() -> Command {
.value_name("PATH")
.conflicts_with_all(&["git", "index", "registry"]),
)
- .arg(flag(
- "list",
- "list all installed packages and their versions",
- ))
- .arg_jobs()
+ .arg(opt("root", "Directory to install packages into").value_name("DIR"))
.arg(flag("force", "Force overwriting existing crates or binaries").short('f'))
.arg(flag("no-track", "Do not save tracking information"))
- .arg_features()
- .arg_profile("Install artifacts with the specified profile")
.arg(flag(
- "debug",
- "Build in debug mode (with the 'dev' profile) instead of release mode",
+ "list",
+ "list all installed packages and their versions",
))
+ .arg_ignore_rust_version()
+ .arg_message_format()
+ .arg_quiet()
.arg_targets_bins_examples(
"Install only the specified binary",
"Install all binaries",
"Install only the specified example",
"Install all examples",
)
+ .arg_features()
+ .arg_jobs()
+ .arg(flag(
+ "debug",
+ "Build in debug mode (with the 'dev' profile) instead of release mode",
+ ))
+ .arg_profile("Install artifacts with the specified profile")
.arg_target_triple("Build for the target triple")
.arg_target_dir()
- .arg(opt("root", "Directory to install packages into").value_name("DIR"))
- .arg(
- opt("index", "Registry index to install from")
- .value_name("INDEX")
- .requires("crate")
- .conflicts_with_all(&["git", "path", "registry"]),
- )
- .arg(
- opt("registry", "Registry to use")
- .value_name("REGISTRY")
- .requires("crate")
- .conflicts_with_all(&["git", "path", "index"]),
- )
- .arg_ignore_rust_version()
- .arg_message_format()
.arg_timings()
.after_help("Run `cargo help install` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/locate_project.rs b/src/tools/cargo/src/bin/cargo/commands/locate_project.rs
index 26c35cd91..69f015300 100644
--- a/src/tools/cargo/src/bin/cargo/commands/locate_project.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/locate_project.rs
@@ -6,8 +6,7 @@ use serde::Serialize;
pub fn cli() -> Command {
subcommand("locate-project")
.about("Print a JSON representation of a Cargo.toml file's location")
- .arg_quiet()
- .arg_manifest_path()
+ .arg(flag("workspace", "Locate Cargo.toml of the workspace root"))
.arg(
opt(
"message-format",
@@ -15,7 +14,8 @@ pub fn cli() -> Command {
)
.value_name("FMT"),
)
- .arg(flag("workspace", "Locate Cargo.toml of the workspace root"))
+ .arg_quiet()
+ .arg_manifest_path()
.after_help("Run `cargo help locate-project` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/login.rs b/src/tools/cargo/src/bin/cargo/commands/login.rs
index 1c8d3ae4c..e51adaa1c 100644
--- a/src/tools/cargo/src/bin/cargo/commands/login.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/login.rs
@@ -4,44 +4,31 @@ use cargo::ops;
pub fn cli() -> Command {
subcommand("login")
- .about(
- "Save an api token from the registry locally. \
- If token is not specified, it will be read from stdin.",
- )
- .arg_quiet()
+ .about("Log in to a registry.")
.arg(Arg::new("token").action(ArgAction::Set))
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
.arg(
- flag(
- "generate-keypair",
- "Generate a public/secret keypair (unstable)",
- )
- .conflicts_with("token"),
- )
- .arg(
- flag("secret-key", "Prompt for secret key (unstable)")
- .conflicts_with_all(&["generate-keypair", "token"]),
- )
- .arg(
- opt(
- "key-subject",
- "Set the key subject for this registry (unstable)",
- )
- .value_name("SUBJECT")
- .conflicts_with("token"),
+ Arg::new("args")
+ .help("Arguments for the credential provider (unstable)")
+ .num_args(0..)
+ .last(true),
)
+ .arg_quiet()
.after_help("Run `cargo help login` for more detailed information.\n")
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let registry = args.registry(config)?;
+ let extra_args = args
+ .get_many::<String>("args")
+ .unwrap_or_default()
+ .map(String::as_str)
+ .collect::<Vec<_>>();
ops::registry_login(
config,
args.get_one::<String>("token").map(|s| s.as_str().into()),
registry.as_deref(),
- args.flag("generate-keypair"),
- args.flag("secret-key"),
- args.get_one("key-subject").map(String::as_str),
+ &extra_args,
)?;
Ok(())
}
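
The login rework above replaces the unstable asymmetric-token flags with a pass-through: everything after `--` is handed to the configured credential provider. A minimal sketch of the same clap pattern outside of cargo (names and flags here are illustrative, not cargo's internal API):

    use clap::{Arg, ArgAction, ArgMatches, Command};

    // Collect trailing provider arguments after `--`, mirroring
    // `.num_args(0..).last(true)` in the diff above.
    fn cli() -> Command {
        Command::new("login")
            .arg(Arg::new("token").action(ArgAction::Set))
            .arg(
                Arg::new("args")
                    .help("Arguments for the credential provider")
                    .num_args(0..)
                    .last(true),
            )
    }

    fn provider_args(matches: &ArgMatches) -> Vec<&str> {
        matches
            .get_many::<String>("args")
            .unwrap_or_default()
            .map(String::as_str)
            .collect()
    }

A provider-specific option could then be passed as, say, `cargo login -- --account work`, where `--account` is a hypothetical flag understood only by the provider.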
diff --git a/src/tools/cargo/src/bin/cargo/commands/logout.rs b/src/tools/cargo/src/bin/cargo/commands/logout.rs
index 0b4d8b83f..4320240c6 100644
--- a/src/tools/cargo/src/bin/cargo/commands/logout.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/logout.rs
@@ -4,8 +4,8 @@ use cargo::ops;
pub fn cli() -> Command {
subcommand("logout")
.about("Remove an API token from the registry locally")
- .arg_quiet()
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+ .arg_quiet()
.after_help("Run `cargo help logout` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/metadata.rs b/src/tools/cargo/src/bin/cargo/commands/metadata.rs
index fdf59654c..54257dee3 100644
--- a/src/tools/cargo/src/bin/cargo/commands/metadata.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/metadata.rs
@@ -8,8 +8,6 @@ pub fn cli() -> Command {
the concrete used versions including overrides, \
in machine-readable format",
)
- .arg_quiet()
- .arg_features()
.arg(multi_opt(
"filter-platform",
"TRIPLE",
@@ -20,12 +18,14 @@ pub fn cli() -> Command {
"Output information only about the workspace members \
and don't fetch dependencies",
))
- .arg_manifest_path()
.arg(
opt("format-version", "Format version")
.value_name("VERSION")
.value_parser(["1"]),
)
+ .arg_quiet()
+ .arg_features()
+ .arg_manifest_path()
.after_help("Run `cargo help metadata` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/new.rs b/src/tools/cargo/src/bin/cargo/commands/new.rs
index 18cf93d2e..6124444c0 100644
--- a/src/tools/cargo/src/bin/cargo/commands/new.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/new.rs
@@ -5,10 +5,10 @@ use cargo::ops;
pub fn cli() -> Command {
subcommand("new")
.about("Create a new cargo package at <path>")
- .arg_quiet()
.arg(Arg::new("path").action(ArgAction::Set).required(true))
- .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
.arg_new_opts()
+ .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+ .arg_quiet()
.after_help("Run `cargo help new` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/owner.rs b/src/tools/cargo/src/bin/cargo/commands/owner.rs
index 493072b7b..223327c31 100644
--- a/src/tools/cargo/src/bin/cargo/commands/owner.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/owner.rs
@@ -1,12 +1,11 @@
use crate::command_prelude::*;
use cargo::ops::{self, OwnersOptions};
-use cargo::util::auth::Secret;
+use cargo_credential::Secret;
pub fn cli() -> Command {
subcommand("owner")
.about("Manage the owners of a crate on the registry")
- .arg_quiet()
.arg(Arg::new("crate").action(ArgAction::Set))
.arg(
multi_opt(
@@ -28,6 +27,7 @@ pub fn cli() -> Command {
.arg(opt("index", "Registry index to modify owners for").value_name("INDEX"))
.arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+ .arg_quiet()
.after_help("Run `cargo help owner` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/package.rs b/src/tools/cargo/src/bin/cargo/commands/package.rs
index ac6b1fe27..cf4ac795c 100644
--- a/src/tools/cargo/src/bin/cargo/commands/package.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/package.rs
@@ -5,7 +5,6 @@ use cargo::ops::{self, PackageOpts};
pub fn cli() -> Command {
subcommand("package")
.about("Assemble the local package into a distributable tarball")
- .arg_quiet()
.arg(
flag(
"list",
@@ -25,21 +24,29 @@ pub fn cli() -> Command {
"allow-dirty",
"Allow dirty working directories to be packaged",
))
- .arg_target_triple("Build for the target triple")
- .arg_target_dir()
- .arg_features()
+ .arg_quiet()
.arg_package_spec_no_all(
"Package(s) to assemble",
"Assemble all packages in the workspace",
"Don't assemble specified packages",
)
- .arg_manifest_path()
+ .arg_features()
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
.arg_jobs()
+ .arg_manifest_path()
.after_help("Run `cargo help package` for more detailed information.\n")
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
+ if ws.root_maybe().is_embedded() {
+ return Err(anyhow::format_err!(
+ "{} is unsupported by `cargo package`",
+ ws.root_manifest().display()
+ )
+ .into());
+ }
let specs = args.packages_from_flags()?;
ops::package(
diff --git a/src/tools/cargo/src/bin/cargo/commands/pkgid.rs b/src/tools/cargo/src/bin/cargo/commands/pkgid.rs
index 664db75bd..ba4540cf1 100644
--- a/src/tools/cargo/src/bin/cargo/commands/pkgid.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/pkgid.rs
@@ -6,8 +6,8 @@ use cargo::util::print_available_packages;
pub fn cli() -> Command {
subcommand("pkgid")
.about("Print a fully qualified package specification")
- .arg_quiet()
.arg(Arg::new("spec").action(ArgAction::Set))
+ .arg_quiet()
.arg_package("Argument to get the package ID specifier for")
.arg_manifest_path()
.after_help("Run `cargo help pkgid` for more detailed information.\n")
@@ -15,6 +15,13 @@ pub fn cli() -> Command {
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
+ if ws.root_maybe().is_embedded() {
+ return Err(anyhow::format_err!(
+ "{} is unsupported by `cargo pkgid`",
+ ws.root_manifest().display()
+ )
+ .into());
+ }
if args.is_present_with_zero_values("package") {
print_available_packages(&ws)?
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/publish.rs b/src/tools/cargo/src/bin/cargo/commands/publish.rs
index c831d399f..bda240c8c 100644
--- a/src/tools/cargo/src/bin/cargo/commands/publish.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/publish.rs
@@ -5,8 +5,9 @@ use cargo::ops::{self, PublishOpts};
pub fn cli() -> Command {
subcommand("publish")
.about("Upload a package to the registry")
- .arg_quiet()
+ .arg_dry_run("Perform all checks without uploading")
.arg_index()
+ .arg(opt("registry", "Registry to publish to").value_name("REGISTRY"))
.arg(opt("token", "Token to use when uploading").value_name("TOKEN"))
.arg(flag(
"no-verify",
@@ -16,20 +17,26 @@ pub fn cli() -> Command {
"allow-dirty",
"Allow dirty working directories to be packaged",
))
- .arg_target_triple("Build for the target triple")
- .arg_target_dir()
+ .arg_quiet()
.arg_package("Package to publish")
- .arg_manifest_path()
.arg_features()
.arg_jobs()
- .arg_dry_run("Perform all checks without uploading")
- .arg(opt("registry", "Registry to publish to").value_name("REGISTRY"))
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
+ .arg_manifest_path()
.after_help("Run `cargo help publish` for more detailed information.\n")
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let registry = args.registry(config)?;
let ws = args.workspace(config)?;
+ if ws.root_maybe().is_embedded() {
+ return Err(anyhow::format_err!(
+ "{} is unsupported by `cargo publish`",
+ ws.root_manifest().display()
+ )
+ .into());
+ }
let index = args.index()?;
ops::publish(
diff --git a/src/tools/cargo/src/bin/cargo/commands/remove.rs b/src/tools/cargo/src/bin/cargo/commands/remove.rs
index 50bc8b7e6..798e6fff6 100644
--- a/src/tools/cargo/src/bin/cargo/commands/remove.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/remove.rs
@@ -1,5 +1,6 @@
use cargo::core::dependency::DepKind;
use cargo::core::PackageIdSpec;
+use cargo::core::Resolve;
use cargo::core::Workspace;
use cargo::ops::cargo_remove::remove;
use cargo::ops::cargo_remove::RemoveOptions;
@@ -24,10 +25,8 @@ pub fn cli() -> clap::Command {
.num_args(1..)
.value_name("DEP_ID")
.help("Dependencies to be removed")])
- .arg_package("Package to remove from")
- .arg_manifest_path()
- .arg_quiet()
.arg_dry_run("Don't actually write the manifest")
+ .arg_quiet()
.next_help_heading("Section")
.args([
clap::Arg::new("dev")
@@ -49,6 +48,8 @@ pub fn cli() -> clap::Command {
.value_parser(clap::builder::NonEmptyStringValueParser::new())
.help("Remove as dependency from the given target platform"),
])
+ .arg_package("Package to remove from")
+ .arg_manifest_path()
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
@@ -109,9 +110,24 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
// Reload the workspace since we've changed dependencies
let ws = args.workspace(config)?;
- resolve_ws(&ws)?;
- }
+ let resolve = {
+ // HACK: Avoid unused patch warnings by temporarily changing the verbosity.
+ // In rare cases, this might cause index update messages to not show up
+ let verbosity = ws.config().shell().verbosity();
+ ws.config()
+ .shell()
+ .set_verbosity(cargo::core::Verbosity::Quiet);
+ let resolve = resolve_ws(&ws);
+ ws.config().shell().set_verbosity(verbosity);
+ resolve?.1
+ };
+ // Attempt to gc unused patches and re-resolve if anything is removed
+ if gc_unused_patches(&workspace, &resolve)? {
+ let ws = args.workspace(config)?;
+ resolve_ws(&ws)?;
+ }
+ }
Ok(())
}
@@ -229,31 +245,6 @@ fn gc_workspace(workspace: &Workspace<'_>) -> CargoResult<()> {
}
}
- // Clean up the patch section
- if let Some(toml_edit::Item::Table(patch_section_table)) = manifest.get_mut("patch") {
- patch_section_table.set_implicit(true);
-
- // The key in each of the subtables is a source (either a registry or a URL)
- for (source, item) in patch_section_table.iter_mut() {
- if let toml_edit::Item::Table(patch_table) = item {
- patch_table.set_implicit(true);
-
- for (key, item) in patch_table.iter_mut() {
- let package_name =
- Dependency::from_toml(&workspace.root_manifest(), key.get(), item)?.name;
- if !source_has_match(
- &package_name,
- source.get(),
- &dependencies,
- workspace.config(),
- )? {
- *item = toml_edit::Item::None;
- }
- }
- }
- }
- }
-
// Clean up the replace section
if let Some(toml_edit::Item::Table(table)) = manifest.get_mut("replace") {
table.set_implicit(true);
@@ -310,35 +301,46 @@ fn spec_has_match(
Ok(false)
}
-/// Check whether or not a source (URL or registry name) matches any non-workspace dependencies.
-fn source_has_match(
- name: &str,
- source: &str,
- dependencies: &[Dependency],
- config: &Config,
-) -> CargoResult<bool> {
- for dep in dependencies {
- if &dep.name != name {
- continue;
- }
+/// Removes unused patches from the manifest
+fn gc_unused_patches(workspace: &Workspace<'_>, resolve: &Resolve) -> CargoResult<bool> {
+ let mut manifest: toml_edit::Document =
+ cargo_util::paths::read(workspace.root_manifest())?.parse()?;
+ let mut modified = false;
- match dep.source_id(config)? {
- MaybeWorkspace::Other(source_id) => {
- if source_id.is_registry() {
- if source_id.display_registry_name() == source
- || source_id.url().as_str() == source
+ // Clean up the patch section
+ if let Some(toml_edit::Item::Table(patch_section_table)) = manifest.get_mut("patch") {
+ patch_section_table.set_implicit(true);
+
+ for (_, item) in patch_section_table.iter_mut() {
+ if let toml_edit::Item::Table(patch_table) = item {
+ patch_table.set_implicit(true);
+
+ for (key, item) in patch_table.iter_mut() {
+ let dep = Dependency::from_toml(&workspace.root_manifest(), key.get(), item)?;
+
+ // Generate a PackageIdSpec url for querying
+ let url = if let MaybeWorkspace::Other(source_id) =
+ dep.source_id(workspace.config())?
{
- return Ok(true);
- }
- } else if source_id.is_git() {
- if source_id.url().as_str() == source {
- return Ok(true);
+ format!("{}#{}", source_id.url(), dep.name)
+ } else {
+ continue;
+ };
+
+ if PackageIdSpec::query_str(&url, resolve.unused_patches().iter().cloned())
+ .is_ok()
+ {
+ *item = toml_edit::Item::None;
+ modified = true;
}
}
}
- MaybeWorkspace::Workspace(_) => {}
}
}
- Ok(false)
+ if modified {
+ cargo_util::paths::write(workspace.root_manifest(), manifest.to_string().as_bytes())?;
+ }
+
+ Ok(modified)
}
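
The new `gc_unused_patches` pass re-resolves after the dependency is removed and drops `[patch]` entries that the resolver reports as unused, matching each entry by a `url#name` spec. A reduced sketch of that matching rule, with cargo's `PackageIdSpec`/`Resolve` types replaced by plain strings (assumption: unused patches are identified by the same `url#name` form):

    use std::collections::HashSet;

    /// Keep only the patch entries whose `<source-url>#<package-name>` spec is
    /// not in the set of unused patches; report whether anything was removed.
    fn gc_patches(patches: &mut Vec<(String, String)>, unused: &HashSet<String>) -> bool {
        let before = patches.len();
        patches.retain(|(url, name)| !unused.contains(&format!("{url}#{name}")));
        before != patches.len()
    }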
diff --git a/src/tools/cargo/src/bin/cargo/commands/run.rs b/src/tools/cargo/src/bin/cargo/commands/run.rs
index 366e19396..1649f72ac 100644
--- a/src/tools/cargo/src/bin/cargo/commands/run.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/run.rs
@@ -14,7 +14,6 @@ pub fn cli() -> Command {
// subcommand aliases are handled in aliased_command()
// .alias("r")
.about("Run a binary or example of the local package")
- .arg_quiet()
.arg(
Arg::new("args")
.help("Arguments for the binary or example to run")
@@ -22,21 +21,22 @@ pub fn cli() -> Command {
.num_args(0..)
.trailing_var_arg(true),
)
+ .arg_ignore_rust_version()
+ .arg_message_format()
+ .arg_quiet()
+ .arg_package("Package with the target to run")
.arg_targets_bin_example(
"Name of the bin target to run",
"Name of the example target to run",
)
- .arg_package("Package with the target to run")
+ .arg_features()
.arg_jobs()
.arg_release("Build artifacts in release mode, with optimizations")
.arg_profile("Build artifacts with the specified profile")
- .arg_features()
.arg_target_triple("Build for the target triple")
.arg_target_dir()
.arg_manifest_path()
- .arg_message_format()
.arg_unit_graph()
- .arg_ignore_rust_version()
.arg_timings()
.after_help("Run `cargo help run` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/rustc.rs b/src/tools/cargo/src/bin/cargo/commands/rustc.rs
index de73eb80c..0a0364e37 100644
--- a/src/tools/cargo/src/bin/cargo/commands/rustc.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/rustc.rs
@@ -8,15 +8,29 @@ const CRATE_TYPE_ARG_NAME: &str = "crate-type";
pub fn cli() -> Command {
subcommand("rustc")
.about("Compile a package, and pass extra options to the compiler")
- .arg_quiet()
.arg(
Arg::new("args")
.num_args(0..)
.help("Extra rustc flags")
.trailing_var_arg(true),
)
+ .arg(
+ opt(
+ PRINT_ARG_NAME,
+ "Output compiler information without compiling",
+ )
+ .value_name("INFO"),
+ )
+ .arg(multi_opt(
+ CRATE_TYPE_ARG_NAME,
+ "CRATE-TYPE",
+ "Comma separated list of types of crates for the compiler to emit",
+ ))
+ .arg_future_incompat_report()
+ .arg_ignore_rust_version()
+ .arg_message_format()
+ .arg_quiet()
.arg_package("Package to build")
- .arg_jobs()
.arg_targets_all(
"Build only this package's library",
"Build only the specified binary",
@@ -29,29 +43,15 @@ pub fn cli() -> Command {
"Build all benches",
"Build all targets",
)
+ .arg_features()
+ .arg_jobs()
.arg_release("Build artifacts in release mode, with optimizations")
.arg_profile("Build artifacts with the specified profile")
- .arg_features()
.arg_target_triple("Target triple which compiles will be for")
- .arg(
- opt(
- PRINT_ARG_NAME,
- "Output compiler information without compiling",
- )
- .value_name("INFO"),
- )
- .arg(multi_opt(
- CRATE_TYPE_ARG_NAME,
- "CRATE-TYPE",
- "Comma separated list of types of crates for the compiler to emit",
- ))
.arg_target_dir()
- .arg_manifest_path()
- .arg_message_format()
.arg_unit_graph()
- .arg_ignore_rust_version()
- .arg_future_incompat_report()
.arg_timings()
+ .arg_manifest_path()
.after_help("Run `cargo help rustc` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/rustdoc.rs b/src/tools/cargo/src/bin/cargo/commands/rustdoc.rs
index e87f435fd..488256ba7 100644
--- a/src/tools/cargo/src/bin/cargo/commands/rustdoc.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/rustdoc.rs
@@ -5,7 +5,6 @@ use crate::command_prelude::*;
pub fn cli() -> Command {
subcommand("rustdoc")
.about("Build a package's documentation, using specified custom flags.")
- .arg_quiet()
.arg(
Arg::new("args")
.help("Extra rustdoc flags")
@@ -16,8 +15,10 @@ pub fn cli() -> Command {
"open",
"Opens the docs in a browser after the operation",
))
+ .arg_ignore_rust_version()
+ .arg_message_format()
+ .arg_quiet()
.arg_package("Package to document")
- .arg_jobs()
.arg_targets_all(
"Build only this package's library",
"Build only the specified binary",
@@ -30,16 +31,15 @@ pub fn cli() -> Command {
"Build all benches",
"Build all targets",
)
+ .arg_features()
+ .arg_jobs()
.arg_release("Build artifacts in release mode, with optimizations")
.arg_profile("Build artifacts with the specified profile")
- .arg_features()
.arg_target_triple("Build for the target triple")
.arg_target_dir()
- .arg_manifest_path()
- .arg_message_format()
.arg_unit_graph()
- .arg_ignore_rust_version()
.arg_timings()
+ .arg_manifest_path()
.after_help("Run `cargo help rustdoc` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/search.rs b/src/tools/cargo/src/bin/cargo/commands/search.rs
index c55d932cc..656172e77 100644
--- a/src/tools/cargo/src/bin/cargo/commands/search.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/search.rs
@@ -7,9 +7,7 @@ use cargo::ops;
pub fn cli() -> Command {
subcommand("search")
.about("Search packages in crates.io")
- .arg_quiet()
.arg(Arg::new("query").num_args(0..))
- .arg_index()
.arg(
opt(
"limit",
@@ -17,7 +15,9 @@ pub fn cli() -> Command {
)
.value_name("LIMIT"),
)
+ .arg_index()
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+ .arg_quiet()
.after_help("Run `cargo help search` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/test.rs b/src/tools/cargo/src/bin/cargo/commands/test.rs
index 607655aaf..80c935d62 100644
--- a/src/tools/cargo/src/bin/cargo/commands/test.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/test.rs
@@ -17,6 +17,12 @@ pub fn cli() -> Command {
.num_args(0..)
.last(true),
)
+ .arg(flag("doc", "Test only this library's documentation"))
+ .arg(flag("no-run", "Compile, but don't run tests"))
+ .arg(flag("no-fail-fast", "Run all tests regardless of failure"))
+ .arg_ignore_rust_version()
+ .arg_future_incompat_report()
+ .arg_message_format()
.arg(
flag(
"quiet",
@@ -24,6 +30,11 @@ pub fn cli() -> Command {
)
.short('q'),
)
+ .arg_package_spec(
+ "Package to run tests for",
+ "Test all packages in the workspace",
+ "Exclude packages from the test",
+ )
.arg_targets_all(
"Test only this package's library unit tests",
"Test only the specified binary",
@@ -34,28 +45,18 @@ pub fn cli() -> Command {
"Test all tests",
"Test only the specified bench target",
"Test all benches",
- "Test all targets",
- )
- .arg(flag("doc", "Test only this library's documentation"))
- .arg(flag("no-run", "Compile, but don't run tests"))
- .arg(flag("no-fail-fast", "Run all tests regardless of failure"))
- .arg_package_spec(
- "Package to run tests for",
- "Test all packages in the workspace",
- "Exclude packages from the test",
+ "Test all targets (does not include doctests)",
)
- .arg_jobs()
+ .arg_features()
+ .arg_jobs_without_keep_going()
+ .arg(flag("keep-going", "Use `--no-fail-fast` instead").hide(true)) // See rust-lang/cargo#11702
.arg_release("Build artifacts in release mode, with optimizations")
.arg_profile("Build artifacts with the specified profile")
- .arg_features()
.arg_target_triple("Build for the target triple")
.arg_target_dir()
- .arg_manifest_path()
- .arg_ignore_rust_version()
- .arg_message_format()
.arg_unit_graph()
- .arg_future_incompat_report()
.arg_timings()
+ .arg_manifest_path()
.after_help(
"Run `cargo help test` for more detailed information.\n\
Run `cargo test -- --help` for test binary options.\n",
@@ -65,6 +66,16 @@ pub fn cli() -> Command {
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
+ if args.keep_going() {
+ return Err(anyhow::format_err!(
+ "\
+unexpected argument `--keep-going` found
+
+ tip: to run as many tests as possible without failing fast, use `--no-fail-fast`"
+ )
+ .into());
+ }
+
let mut compile_opts = args.compile_options(
config,
CompileMode::Test,
diff --git a/src/tools/cargo/src/bin/cargo/commands/tree.rs b/src/tools/cargo/src/bin/cargo/commands/tree.rs
index 94bf3fff1..4472765a9 100644
--- a/src/tools/cargo/src/bin/cargo/commands/tree.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/tree.rs
@@ -12,24 +12,12 @@ use std::str::FromStr;
pub fn cli() -> Command {
subcommand("tree")
.about("Display a tree visualization of a dependency graph")
- .arg_quiet()
- .arg_manifest_path()
- .arg_package_spec_no_all(
- "Package to be used as the root of the tree",
- "Display the tree for all packages in the workspace",
- "Exclude specific workspace members",
- )
.arg(
flag("all", "Deprecated, use --no-dedupe instead")
.short('a')
.hide(true),
)
- .arg(flag("all-targets", "Deprecated, use --target=all instead").hide(true))
- .arg_features()
- .arg_target_triple(
- "Filter dependencies matching the given target-triple (default host platform). \
- Pass `all` to include all targets.",
- )
+ .arg_quiet()
.arg(flag("no-dev-dependencies", "Deprecated, use -e=no-dev instead").hide(true))
.arg(
multi_opt(
@@ -96,6 +84,18 @@ pub fn cli() -> Command {
.short('V')
.hide(true),
)
+ .arg_package_spec_no_all(
+ "Package to be used as the root of the tree",
+ "Display the tree for all packages in the workspace",
+ "Exclude specific workspace members",
+ )
+ .arg_features()
+ .arg(flag("all-targets", "Deprecated, use --target=all instead").hide(true))
+ .arg_target_triple(
+ "Filter dependencies matching the given target-triple (default host platform). \
+ Pass `all` to include all targets.",
+ )
+ .arg_manifest_path()
.after_help("Run `cargo help tree` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/uninstall.rs b/src/tools/cargo/src/bin/cargo/commands/uninstall.rs
index 46654b668..398979bf4 100644
--- a/src/tools/cargo/src/bin/cargo/commands/uninstall.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/uninstall.rs
@@ -5,11 +5,14 @@ use cargo::ops;
pub fn cli() -> Command {
subcommand("uninstall")
.about("Remove a Rust binary")
- .arg_quiet()
.arg(Arg::new("spec").num_args(0..))
- .arg_package_spec_simple("Package to uninstall")
- .arg(multi_opt("bin", "NAME", "Only uninstall the binary NAME"))
.arg(opt("root", "Directory to uninstall packages from").value_name("DIR"))
+ .arg_quiet()
+ .arg_package_spec_simple("Package to uninstall")
+ .arg(
+ multi_opt("bin", "NAME", "Only uninstall the binary NAME")
+ .help_heading(heading::TARGET_SELECTION),
+ )
.after_help("Run `cargo help uninstall` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/update.rs b/src/tools/cargo/src/bin/cargo/commands/update.rs
index da33e8d30..31175ef16 100644
--- a/src/tools/cargo/src/bin/cargo/commands/update.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/update.rs
@@ -6,15 +6,15 @@ use cargo::util::print_available_packages;
pub fn cli() -> Command {
subcommand("update")
.about("Update dependencies as recorded in the local lock file")
- .arg_quiet()
- .arg(flag("workspace", "Only update the workspace packages").short('w'))
- .arg_package_spec_simple("Package to update")
- .arg(flag(
- "aggressive",
- "Force updating all dependencies of SPEC as well when used with -p",
- ))
.arg_dry_run("Don't actually write the lockfile")
.arg(
+ flag(
+ "aggressive",
+ "Force updating all dependencies of SPEC as well when used with -p",
+ )
+ .conflicts_with("precise"),
+ )
+ .arg(
opt(
"precise",
"Update a single dependency to exactly PRECISE when used with -p",
@@ -22,6 +22,13 @@ pub fn cli() -> Command {
.value_name("PRECISE")
.requires("package"),
)
+ .arg_quiet()
+ .arg(
+ flag("workspace", "Only update the workspace packages")
+ .short('w')
+ .help_heading(heading::PACKAGE_SELECTION),
+ )
+ .arg_package_spec_simple("Package to update")
.arg_manifest_path()
.after_help("Run `cargo help update` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/vendor.rs b/src/tools/cargo/src/bin/cargo/commands/vendor.rs
index 1fd79ec51..69b4ee380 100644
--- a/src/tools/cargo/src/bin/cargo/commands/vendor.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/vendor.rs
@@ -5,8 +5,6 @@ use std::path::PathBuf;
pub fn cli() -> Command {
subcommand("vendor")
.about("Vendor all dependencies for a project locally")
- .arg_quiet()
- .arg_manifest_path()
.arg(
Arg::new("path")
.action(ArgAction::Set)
@@ -38,6 +36,8 @@ pub fn cli() -> Command {
.arg(flag("relative-path", "Not supported").hide(true))
.arg(flag("only-git-deps", "Not supported").hide(true))
.arg(flag("disallow-duplicates", "Not supported").hide(true))
+ .arg_quiet()
+ .arg_manifest_path()
.after_help("Run `cargo help vendor` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/commands/yank.rs b/src/tools/cargo/src/bin/cargo/commands/yank.rs
index 3dee52279..e6700bd2f 100644
--- a/src/tools/cargo/src/bin/cargo/commands/yank.rs
+++ b/src/tools/cargo/src/bin/cargo/commands/yank.rs
@@ -1,12 +1,11 @@
use crate::command_prelude::*;
use cargo::ops;
-use cargo::util::auth::Secret;
+use cargo_credential::Secret;
pub fn cli() -> Command {
subcommand("yank")
.about("Remove a pushed crate from the index")
- .arg_quiet()
.arg(Arg::new("crate").action(ArgAction::Set))
.arg(
opt("version", "The version to yank or un-yank")
@@ -18,8 +17,9 @@ pub fn cli() -> Command {
"Undo a yank, putting a version back into the index",
))
.arg(opt("index", "Registry index to yank from").value_name("INDEX"))
- .arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+ .arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
+ .arg_quiet()
.after_help("Run `cargo help yank` for more detailed information.\n")
}
diff --git a/src/tools/cargo/src/bin/cargo/main.rs b/src/tools/cargo/src/bin/cargo/main.rs
index 462332fb7..d96c1423d 100644
--- a/src/tools/cargo/src/bin/cargo/main.rs
+++ b/src/tools/cargo/src/bin/cargo/main.rs
@@ -20,10 +20,7 @@ mod commands;
use crate::command_prelude::*;
fn main() {
- #[cfg(feature = "pretty-env-logger")]
- pretty_env_logger::init_custom_env("CARGO_LOG");
- #[cfg(not(feature = "pretty-env-logger"))]
- env_logger::init_from_env("CARGO_LOG");
+ setup_logger();
let mut config = cli::LazyConfig::new();
@@ -40,6 +37,16 @@ fn main() {
}
}
+fn setup_logger() {
+ let env = tracing_subscriber::EnvFilter::from_env("CARGO_LOG");
+
+ tracing_subscriber::fmt()
+ .with_ansi(std::io::IsTerminal::is_terminal(&std::io::stderr()))
+ .with_writer(std::io::stderr)
+ .with_env_filter(env)
+ .init();
+}
+
/// Table for defining the aliases which come builtin in `Cargo`.
/// The contents are structured as: `(alias, aliased_command, description)`.
const BUILTIN_ALIASES: [(&str, &str, &str); 6] = [
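
With the change above, cargo's logging is driven by `tracing-subscriber` instead of `env_logger`/`pretty-env-logger`, still configured through `CARGO_LOG`. A standalone sketch of the same initialization (assumes the `tracing-subscriber` crate with its `env-filter` feature enabled):

    use std::io::IsTerminal;

    fn setup_logger() {
        // Filter directives come from CARGO_LOG, e.g.
        //   CARGO_LOG=cargo::core::compiler::fingerprint=trace
        let env = tracing_subscriber::EnvFilter::from_env("CARGO_LOG");

        tracing_subscriber::fmt()
            .with_ansi(std::io::stderr().is_terminal())
            .with_writer(std::io::stderr)
            .with_env_filter(env)
            .init();
    }

Running a build with `CARGO_LOG=debug` then behaves much as before; the rest of this patch renames the `log::` macro calls to `tracing::` accordingly.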
diff --git a/src/tools/cargo/src/cargo/core/compiler/build_context/target_info.rs b/src/tools/cargo/src/cargo/core/compiler/build_context/target_info.rs
index e6e41c522..754adcf3c 100644
--- a/src/tools/cargo/src/cargo/core/compiler/build_context/target_info.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/build_context/target_info.rs
@@ -185,6 +185,12 @@ impl TargetInfo {
.args(&rustflags)
.env_remove("RUSTC_LOG");
+ // Remove the `FD_CLOEXEC` flag set by `jobserver::Client` so the jobserver
+ // is passed down to the probed rustc as the environment variables specify.

+ if let Some(client) = config.jobserver_from_env() {
+ process.inherit_jobserver(client);
+ }
+
if let CompileKind::Target(target) = kind {
process.arg("--target").arg(target.rustc_target());
}
@@ -1065,7 +1071,7 @@ impl RustDocFingerprint {
if fingerprint.rustc_vv == actual_rustdoc_target_data.rustc_vv {
return Ok(());
} else {
- log::debug!(
+ tracing::debug!(
"doc fingerprint changed:\noriginal:\n{}\nnew:\n{}",
fingerprint.rustc_vv,
actual_rustdoc_target_data.rustc_vv
@@ -1073,11 +1079,11 @@ impl RustDocFingerprint {
}
}
Err(e) => {
- log::debug!("could not deserialize {:?}: {}", fingerprint_path, e);
+ tracing::debug!("could not deserialize {:?}: {}", fingerprint_path, e);
}
};
// Fingerprint does not match, delete the doc directories and write a new fingerprint.
- log::debug!(
+ tracing::debug!(
"fingerprint {:?} mismatch, clearing doc directories",
fingerprint_path
);
diff --git a/src/tools/cargo/src/cargo/core/compiler/context/compilation_files.rs b/src/tools/cargo/src/cargo/core/compiler/context/compilation_files.rs
index 1c9d28461..126e17112 100644
--- a/src/tools/cargo/src/cargo/core/compiler/context/compilation_files.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/context/compilation_files.rs
@@ -7,7 +7,7 @@ use std::path::{Path, PathBuf};
use std::sync::Arc;
use lazycell::LazyCell;
-use log::debug;
+use tracing::debug;
use super::{BuildContext, CompileKind, Context, FileFlavor, Layout};
use crate::core::compiler::{CompileMode, CompileTarget, CrateType, FileType, Unit};
@@ -26,7 +26,7 @@ use crate::util::{self, CargoResult, StableHasher};
const METADATA_VERSION: u8 = 2;
/// The `Metadata` is a hash used to make unique file names for each unit in a
-/// build. It is also use for symbol mangling.
+/// build. It is also used for symbol mangling.
///
/// For example:
/// - A project may depend on crate `A` and crate `B`, so the package name must be in the file name.
diff --git a/src/tools/cargo/src/cargo/core/compiler/custom_build.rs b/src/tools/cargo/src/cargo/core/compiler/custom_build.rs
index d17462174..85306aaac 100644
--- a/src/tools/cargo/src/cargo/core/compiler/custom_build.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/custom_build.rs
@@ -690,7 +690,17 @@ impl BuildOutput {
continue;
}
let data = match iter.next() {
- Some(val) => val,
+ Some(val) => {
+ if val.starts_with(":") {
+ // Line started with `cargo::`.
+ bail!("unsupported output in {}: `{}`\n\
+ Found a `cargo::key=value` build directive which is reserved for future use.\n\
+ Either change the directive to `cargo:key=value` syntax (note the single `:`) or upgrade your version of Rust.\n\
+ See https://doc.rust-lang.org/cargo/reference/build-scripts.html#outputs-of-the-build-script \
+ for more information about build script outputs.", whence, line);
+ }
+ val
+ }
None => continue,
};
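
The added check guards the reserved `cargo::key=value` directive form: build scripts must keep using a single colon until the double-colon syntax is stabilized. A simplified sketch of the parse (the real parser above also tolerates lines without `=`; this version does not):

    /// Split a `cargo:key=value` build-script line, rejecting the reserved
    /// `cargo::` prefix like the diff above.
    fn parse_directive(line: &str) -> Result<(&str, &str), String> {
        let rest = line
            .strip_prefix("cargo:")
            .ok_or_else(|| format!("not a build directive: `{line}`"))?;
        if rest.starts_with(':') {
            return Err(format!("`cargo::` syntax is reserved for future use: `{line}`"));
        }
        match rest.split_once('=') {
            Some((key, value)) => Ok((key, value)),
            None => Err(format!("missing `=` in `{line}`")),
        }
    }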
diff --git a/src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs b/src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs
index aa8be50f7..2e6fb7eed 100644
--- a/src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/fingerprint/mod.rs
@@ -366,10 +366,10 @@ use std::time::SystemTime;
use anyhow::{bail, format_err, Context as _};
use cargo_util::{paths, ProcessBuilder};
use filetime::FileTime;
-use log::{debug, info};
use serde::de;
use serde::ser;
use serde::{Deserialize, Serialize};
+use tracing::{debug, info};
use crate::core::compiler::unit_graph::UnitDep;
use crate::core::Package;
@@ -1815,7 +1815,7 @@ pub fn parse_dep_info(
let info = match EncodedDepInfo::parse(&data) {
Some(info) => info,
None => {
- log::warn!("failed to parse cargo's dep-info at {:?}", dep_info);
+ tracing::warn!("failed to parse cargo's dep-info at {:?}", dep_info);
return Ok(None);
}
};
@@ -1857,14 +1857,27 @@ where
Err(..) => return Some(StaleItem::MissingFile(reference.to_path_buf())),
};
+ let skipable_dirs = if let Ok(cargo_home) = home::cargo_home() {
+ let skipable_dirs: Vec<_> = ["git", "registry"]
+ .into_iter()
+ .map(|subfolder| cargo_home.join(subfolder))
+ .collect();
+ Some(skipable_dirs)
+ } else {
+ None
+ };
+
for path in paths {
let path = path.as_ref();
- // Assuming anything in cargo_home is immutable (see also #9455 about marking it readonly)
- // which avoids rebuilds when CI caches $CARGO_HOME/registry/{index, cache} and
- // $CARGO_HOME/git/db across runs, keeping the content the same but changing the mtime.
- if let Ok(true) = home::cargo_home().map(|home| path.starts_with(home)) {
- continue;
+ // Assuming anything in cargo_home/{git, registry} is immutable
+ // (see also #9455 about marking the src directory readonly) which avoids rebuilds when CI
+ // caches $CARGO_HOME/registry/{index, cache} and $CARGO_HOME/git/db across runs, keeping
+ // the content the same but changing the mtime.
+ if let Some(ref skipable_dirs) = skipable_dirs {
+ if skipable_dirs.iter().any(|dir| path.starts_with(dir)) {
+ continue;
+ }
}
let path_mtime = match mtime_cache.entry(path.to_path_buf()) {
Entry::Occupied(o) => *o.get(),
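
The fingerprint scan previously skipped anything under `$CARGO_HOME`; it now skips only the `git` and `registry` subdirectories, so sources that merely live inside cargo home are still checked for modification. The predicate, reduced to a standalone form:

    use std::path::Path;

    /// True if mtime checks should be skipped for `path`: only the package
    /// caches under `$CARGO_HOME/git` and `$CARGO_HOME/registry` are assumed
    /// immutable.
    fn skip_mtime_check(path: &Path, cargo_home: &Path) -> bool {
        ["git", "registry"]
            .iter()
            .map(|sub| cargo_home.join(sub))
            .any(|dir| path.starts_with(dir))
    }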
diff --git a/src/tools/cargo/src/cargo/core/compiler/future_incompat.rs b/src/tools/cargo/src/cargo/core/compiler/future_incompat.rs
index 955dfb8f2..ccea28b94 100644
--- a/src/tools/cargo/src/cargo/core/compiler/future_incompat.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/future_incompat.rs
@@ -417,7 +417,7 @@ pub fn save_and_display_report(
let current_reports = match OnDiskReports::load(bcx.ws) {
Ok(r) => r,
Err(e) => {
- log::debug!(
+ tracing::debug!(
"saving future-incompatible reports failed to load current reports: {:?}",
e
);
diff --git a/src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs b/src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs
index 6e8866b2b..26fcd4826 100644
--- a/src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/job_queue/mod.rs
@@ -125,8 +125,8 @@ use std::time::Duration;
use anyhow::{format_err, Context as _};
use cargo_util::ProcessBuilder;
use jobserver::{Acquired, HelperThread};
-use log::{debug, trace};
use semver::Version;
+use tracing::{debug, trace};
pub use self::job::Freshness::{self, Dirty, Fresh};
pub use self::job::{Job, Work};
@@ -840,7 +840,7 @@ impl<'cfg> DrainState<'cfg> {
}
err_state.count += 1;
} else {
- log::warn!("{:?}", new_err.error);
+ tracing::warn!("{:?}", new_err.error);
}
}
diff --git a/src/tools/cargo/src/cargo/core/compiler/mod.rs b/src/tools/cargo/src/cargo/core/compiler/mod.rs
index 31e63c226..7024a2ac5 100644
--- a/src/tools/cargo/src/cargo/core/compiler/mod.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/mod.rs
@@ -65,7 +65,7 @@ use std::sync::Arc;
use anyhow::{Context as _, Error};
use lazycell::LazyCell;
-use log::{debug, trace};
+use tracing::{debug, trace};
pub use self::build_config::{BuildConfig, CompileMode, MessageFormat, TimingOutput};
pub use self::build_context::{
@@ -368,7 +368,7 @@ fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc<dyn Executor>) -> Car
// See rust-lang/cargo#8348.
if output.hardlink.is_some() && output.path.exists() {
_ = paths::remove_file(&output.path).map_err(|e| {
- log::debug!(
+ tracing::debug!(
"failed to delete previous output file `{:?}`: {e:?}",
output.path
);
diff --git a/src/tools/cargo/src/cargo/core/compiler/output_depinfo.rs b/src/tools/cargo/src/cargo/core/compiler/output_depinfo.rs
index d659d620c..db98adf92 100644
--- a/src/tools/cargo/src/cargo/core/compiler/output_depinfo.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/output_depinfo.rs
@@ -9,7 +9,7 @@ use std::path::{Path, PathBuf};
use super::{fingerprint, Context, FileFlavor, Unit};
use crate::util::{internal, CargoResult};
use cargo_util::paths;
-use log::debug;
+use tracing::debug;
/// Bacially just normalizes a given path and converts it to a string.
fn render_filename<P: AsRef<Path>>(path: P, basedir: Option<&str>) -> CargoResult<String> {
diff --git a/src/tools/cargo/src/cargo/core/compiler/rustdoc.rs b/src/tools/cargo/src/cargo/core/compiler/rustdoc.rs
index f6fdd005a..aa4bd0dd4 100644
--- a/src/tools/cargo/src/cargo/core/compiler/rustdoc.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/rustdoc.rs
@@ -112,7 +112,7 @@ pub fn add_root_urls(
) -> CargoResult<()> {
let config = cx.bcx.config;
if !config.cli_unstable().rustdoc_map {
- log::debug!("`doc.extern-map` ignored, requires -Zrustdoc-map flag");
+ tracing::debug!("`doc.extern-map` ignored, requires -Zrustdoc-map flag");
return Ok(());
}
let map = config.doc_extern_map()?;
@@ -125,7 +125,7 @@ pub fn add_root_urls(
if let Ok(index_url) = config.get_registry_index(name) {
Some((name, index_url))
} else {
- log::warn!(
+ tracing::warn!(
"`doc.extern-map.{}` specifies a registry that is not defined",
name
);
@@ -181,7 +181,7 @@ pub fn add_root_urls(
})?;
Some(url.to_string())
} else {
- log::warn!(
+ tracing::warn!(
"`doc.extern-map.std` is \"local\", but local docs don't appear to exist at {}",
html_root.display()
);
diff --git a/src/tools/cargo/src/cargo/core/compiler/timings.js b/src/tools/cargo/src/cargo/core/compiler/timings.js
index 986070ab0..1b7e29e01 100644
--- a/src/tools/cargo/src/cargo/core/compiler/timings.js
+++ b/src/tools/cargo/src/cargo/core/compiler/timings.js
@@ -75,6 +75,8 @@ function render_pipeline_graph() {
ctx.translate(X_LINE, MARGIN);
// Compute x,y coordinate of each block.
+ // We also populate a map with the count of each unit name to disambiguate if necessary
+ const unitCount = new Map();
UNIT_COORDS = {};
for (i=0; i<units.length; i++) {
let unit = units[i];
@@ -86,6 +88,9 @@ function render_pipeline_graph() {
}
let width = Math.max(px_per_sec * unit.duration, 1.0);
UNIT_COORDS[unit.i] = {x, y, width, rmeta_x};
+
+ const count = unitCount.get(unit.name) || 0;
+ unitCount.set(unit.name, count + 1);
}
// Draw the blocks.
@@ -111,7 +116,10 @@ function render_pipeline_graph() {
ctx.textAlign = 'start';
ctx.textBaseline = 'middle';
ctx.font = '14px sans-serif';
- const label = `${unit.name}${unit.target} ${unit.duration}s`;
+
+ const labelName = (unitCount.get(unit.name) || 0) > 1 ? `${unit.name} (v${unit.version})${unit.target}` : `${unit.name}${unit.target}`;
+ const label = `${labelName}: ${unit.duration}s`;
+
const text_info = ctx.measureText(label);
const label_x = Math.min(x + 5.0, canvas_width - text_info.width - X_LINE);
ctx.fillText(label, label_x, y + BOX_HEIGHT / 2);
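
The timings-graph change counts how many units share a crate name and appends the version to the label only when there is a collision. The same rule expressed in Rust (the actual change is the JavaScript above; this is only an illustration):

    use std::collections::HashMap;

    /// Append the version only when a crate name occurs more than once
    /// among the compiled units, as timings.js now does.
    fn labels(units: &[(&str, &str)]) -> Vec<String> {
        let mut counts: HashMap<&str, usize> = HashMap::new();
        for &(name, _) in units {
            *counts.entry(name).or_insert(0) += 1;
        }
        units
            .iter()
            .map(|&(name, version)| {
                if counts[name] > 1 {
                    format!("{name} (v{version})")
                } else {
                    name.to_string()
                }
            })
            .collect()
    }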
diff --git a/src/tools/cargo/src/cargo/core/compiler/timings.rs b/src/tools/cargo/src/cargo/core/compiler/timings.rs
index 0e0dc03ee..57ded9bf8 100644
--- a/src/tools/cargo/src/cargo/core/compiler/timings.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/timings.rs
@@ -122,7 +122,7 @@ impl<'cfg> Timings<'cfg> {
match State::current() {
Ok(state) => Some(state),
Err(e) => {
- log::info!("failed to get CPU state, CPU tracking disabled: {:?}", e);
+ tracing::info!("failed to get CPU state, CPU tracking disabled: {:?}", e);
None
}
}
@@ -276,7 +276,7 @@ impl<'cfg> Timings<'cfg> {
let current = match State::current() {
Ok(s) => s,
Err(e) => {
- log::info!("failed to get CPU state: {:?}", e);
+ tracing::info!("failed to get CPU state: {:?}", e);
return;
}
};
diff --git a/src/tools/cargo/src/cargo/core/compiler/unit_dependencies.rs b/src/tools/cargo/src/cargo/core/compiler/unit_dependencies.rs
index 369fd8318..686822356 100644
--- a/src/tools/cargo/src/cargo/core/compiler/unit_dependencies.rs
+++ b/src/tools/cargo/src/cargo/core/compiler/unit_dependencies.rs
@@ -17,7 +17,7 @@
use std::collections::{HashMap, HashSet};
-use log::trace;
+use tracing::trace;
use crate::core::compiler::artifact::match_artifacts_kind_with_targets;
use crate::core::compiler::unit_graph::{UnitDep, UnitGraph};
diff --git a/src/tools/cargo/src/cargo/core/dependency.rs b/src/tools/cargo/src/cargo/core/dependency.rs
index 0b3aba8ad..c8fee6262 100644
--- a/src/tools/cargo/src/cargo/core/dependency.rs
+++ b/src/tools/cargo/src/cargo/core/dependency.rs
@@ -1,5 +1,4 @@
use cargo_platform::Platform;
-use log::trace;
use semver::VersionReq;
use serde::ser;
use serde::Serialize;
@@ -7,6 +6,7 @@ use std::borrow::Cow;
use std::fmt;
use std::path::PathBuf;
use std::rc::Rc;
+use tracing::trace;
use crate::core::compiler::{CompileKind, CompileTarget};
use crate::core::{PackageId, SourceId, Summary};
diff --git a/src/tools/cargo/src/cargo/core/package.rs b/src/tools/cargo/src/cargo/core/package.rs
index f4ab448d2..c84941462 100644
--- a/src/tools/cargo/src/cargo/core/package.rs
+++ b/src/tools/cargo/src/cargo/core/package.rs
@@ -13,9 +13,9 @@ use bytesize::ByteSize;
use curl::easy::Easy;
use curl::multi::{EasyHandle, Multi};
use lazycell::LazyCell;
-use log::debug;
use semver::Version;
use serde::Serialize;
+use tracing::debug;
use crate::core::compiler::{CompileKind, RustcTargetData};
use crate::core::dependency::DepKind;
@@ -25,7 +25,7 @@ use crate::core::source::MaybePackage;
use crate::core::{Dependency, Manifest, PackageId, SourceId, Target};
use crate::core::{SourceMap, Summary, Workspace};
use crate::util::config::PackageCacheLock;
-use crate::util::errors::{CargoResult, HttpNotSuccessful, DEBUG_HEADERS};
+use crate::util::errors::{CargoResult, HttpNotSuccessful};
use crate::util::interning::InternedString;
use crate::util::network::http::http_handle_and_timeout;
use crate::util::network::http::HttpTimeout;
@@ -748,9 +748,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
// Headers contain trailing \r\n, trim them to make it easier
// to work with.
let h = String::from_utf8_lossy(data).trim().to_string();
- if DEBUG_HEADERS.iter().any(|p| h.starts_with(p)) {
- downloads.pending[&token].0.headers.borrow_mut().push(h);
- }
+ downloads.pending[&token].0.headers.borrow_mut().push(h);
}
});
true
diff --git a/src/tools/cargo/src/cargo/core/registry.rs b/src/tools/cargo/src/cargo/core/registry.rs
index e20531b70..da3d612d0 100644
--- a/src/tools/cargo/src/cargo/core/registry.rs
+++ b/src/tools/cargo/src/cargo/core/registry.rs
@@ -8,7 +8,7 @@ use crate::util::errors::CargoResult;
use crate::util::interning::InternedString;
use crate::util::{CanonicalUrl, Config};
use anyhow::{bail, Context as _};
-use log::{debug, trace};
+use tracing::{debug, trace};
use url::Url;
/// Source of information about a group of packages.
@@ -876,7 +876,7 @@ fn summary_for_patch(
// Since the locked patch did not match anything, try the unlocked one.
let orig_matches =
ready!(source.query_vec(orig_patch, QueryKind::Exact)).unwrap_or_else(|e| {
- log::warn!(
+ tracing::warn!(
"could not determine unlocked summaries for dep {:?}: {:?}",
orig_patch,
e
@@ -895,7 +895,7 @@ fn summary_for_patch(
let name_summaries =
ready!(source.query_vec(&name_only_dep, QueryKind::Exact)).unwrap_or_else(|e| {
- log::warn!(
+ tracing::warn!(
"failed to do name-only summary query for {:?}: {:?}",
name_only_dep,
e
diff --git a/src/tools/cargo/src/cargo/core/resolver/conflict_cache.rs b/src/tools/cargo/src/cargo/core/resolver/conflict_cache.rs
index 10c41761d..fba497506 100644
--- a/src/tools/cargo/src/cargo/core/resolver/conflict_cache.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/conflict_cache.rs
@@ -1,6 +1,6 @@
use std::collections::{BTreeMap, HashMap, HashSet};
-use log::trace;
+use tracing::trace;
use super::types::ConflictMap;
use crate::core::resolver::Context;
diff --git a/src/tools/cargo/src/cargo/core/resolver/context.rs b/src/tools/cargo/src/cargo/core/resolver/context.rs
index 4854dcde7..f19c678a6 100644
--- a/src/tools/cargo/src/cargo/core/resolver/context.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/context.rs
@@ -6,9 +6,9 @@ use crate::core::{Dependency, PackageId, SourceId, Summary};
use crate::util::interning::InternedString;
use crate::util::Graph;
use anyhow::format_err;
-use log::debug;
use std::collections::HashMap;
use std::num::NonZeroU64;
+use tracing::debug;
pub use super::encode::Metadata;
pub use super::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
diff --git a/src/tools/cargo/src/cargo/core/resolver/dep_cache.rs b/src/tools/cargo/src/cargo/core/resolver/dep_cache.rs
index 54b0ce97f..997533014 100644
--- a/src/tools/cargo/src/cargo/core/resolver/dep_cache.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/dep_cache.rs
@@ -23,10 +23,10 @@ use crate::util::errors::CargoResult;
use crate::util::interning::InternedString;
use anyhow::Context as _;
-use log::debug;
use std::collections::{BTreeSet, HashMap, HashSet};
use std::rc::Rc;
use std::task::Poll;
+use tracing::debug;
pub struct RegistryQueryer<'a> {
pub registry: &'a mut (dyn Registry + 'a),
diff --git a/src/tools/cargo/src/cargo/core/resolver/encode.rs b/src/tools/cargo/src/cargo/core/resolver/encode.rs
index f73d023b1..1ee0d23f4 100644
--- a/src/tools/cargo/src/cargo/core/resolver/encode.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/encode.rs
@@ -117,13 +117,13 @@ use crate::util::errors::CargoResult;
use crate::util::interning::InternedString;
use crate::util::{internal, Graph};
use anyhow::{bail, Context as _};
-use log::debug;
use serde::de;
use serde::ser;
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, HashMap, HashSet};
use std::fmt;
use std::str::FromStr;
+use tracing::debug;
/// The `Cargo.lock` structure.
#[derive(Serialize, Deserialize, Debug)]
@@ -196,13 +196,13 @@ impl EncodableResolve {
let enc_id = EncodablePackageId {
name: pkg.name.clone(),
version: Some(pkg.version.clone()),
- source: pkg.source,
+ source: pkg.source.clone(),
};
if !all_pkgs.insert(enc_id.clone()) {
anyhow::bail!("package `{}` is specified twice in the lockfile", pkg.name);
}
- let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) {
+ let id = match pkg.source.as_deref().or_else(|| path_deps.get(&pkg.name)) {
// We failed to find a local package in the workspace.
// It must have been removed and should be ignored.
None => {
@@ -366,7 +366,7 @@ impl EncodableResolve {
let mut unused_patches = Vec::new();
for pkg in self.patch.unused {
- let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) {
+ let id = match pkg.source.as_deref().or_else(|| path_deps.get(&pkg.name)) {
Some(&src) => PackageId::new(&pkg.name, &pkg.version, src)?,
None => continue,
};
@@ -488,17 +488,95 @@ impl Patch {
pub struct EncodableDependency {
name: String,
version: String,
- source: Option<SourceId>,
+ source: Option<EncodableSourceId>,
checksum: Option<String>,
dependencies: Option<Vec<EncodablePackageId>>,
replace: Option<EncodablePackageId>,
}
+/// Pretty much equivalent to [`SourceId`] with a different serialization method.
+///
+/// The serialization for `SourceId` doesn't do URL encode for parameters.
+/// In contrast, this type is aware of that whenever [`ResolveVersion`] allows
+/// us to do so (v4 or later).
+///
+/// [`EncodableResolve`] turns into a `
+#[derive(Deserialize, Debug, PartialOrd, Ord, Clone)]
+#[serde(transparent)]
+pub struct EncodableSourceId {
+ inner: SourceId,
+ /// We don't care about the deserialization of this, as the `url` crate
+ /// will always decode as the URL was encoded. Only when a [`Resolve`]
+ /// turns into a [`EncodableResolve`] will it set the value accordingly
+ /// via [`encodable_source_id`].
+ #[serde(skip)]
+ encoded: bool,
+}
+
+impl EncodableSourceId {
+ /// Creates a `EncodableSourceId` that always encodes URL params.
+ fn new(inner: SourceId) -> Self {
+ Self {
+ inner,
+ encoded: true,
+ }
+ }
+
+ /// Creates a `EncodableSourceId` that doesn't encode URL params. This is
+ /// for backward compatibility with older lockfile versions.
+ fn without_url_encoded(inner: SourceId) -> Self {
+ Self {
+ inner,
+ encoded: false,
+ }
+ }
+
+ /// Encodes the inner [`SourceId`] as a URL.
+ fn as_url(&self) -> impl fmt::Display + '_ {
+ if self.encoded {
+ self.inner.as_encoded_url()
+ } else {
+ self.inner.as_url()
+ }
+ }
+}
+
+impl std::ops::Deref for EncodableSourceId {
+ type Target = SourceId;
+
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+
+impl ser::Serialize for EncodableSourceId {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ s.collect_str(&self.as_url())
+ }
+}
+
+impl std::hash::Hash for EncodableSourceId {
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ self.inner.hash(state)
+ }
+}
+
+impl std::cmp::PartialEq for EncodableSourceId {
+ fn eq(&self, other: &Self) -> bool {
+ self.inner == other.inner
+ }
+}
+
+impl std::cmp::Eq for EncodableSourceId {}
+
#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Clone)]
pub struct EncodablePackageId {
name: String,
version: Option<String>,
- source: Option<SourceId>,
+ source: Option<EncodableSourceId>,
}
impl fmt::Display for EncodablePackageId {
@@ -535,7 +613,8 @@ impl FromStr for EncodablePackageId {
Ok(EncodablePackageId {
name: name.to_string(),
version: version.map(|v| v.to_string()),
- source: source_id,
+ // Default to url encoded.
+ source: source_id.map(EncodableSourceId::new),
})
}
}
@@ -603,7 +682,7 @@ impl ser::Serialize for Resolve {
.map(|id| EncodableDependency {
name: id.name().to_string(),
version: id.version().to_string(),
- source: encode_source(id.source_id()),
+ source: encodable_source_id(id.source_id(), self.version()),
dependencies: None,
replace: None,
checksum: if self.version() >= ResolveVersion::V2 {
@@ -676,7 +755,7 @@ fn encodable_resolve_node(
EncodableDependency {
name: id.name().to_string(),
version: id.version().to_string(),
- source: encode_source(id.source_id()),
+ source: encodable_source_id(id.source_id(), resolve.version()),
dependencies: deps,
replace,
checksum: if resolve.version() >= ResolveVersion::V2 {
@@ -702,7 +781,7 @@ pub fn encodable_package_id(
}
}
}
- let mut source = encode_source(id_to_encode).map(|s| s.with_precise(None));
+ let mut source = encodable_source_id(id_to_encode.with_precise(None), resolve_version);
if let Some(counts) = &state.counts {
let version_counts = &counts[&id.name()];
if version_counts[&id.version()] == 1 {
@@ -719,10 +798,13 @@ pub fn encodable_package_id(
}
}
-fn encode_source(id: SourceId) -> Option<SourceId> {
+fn encodable_source_id(id: SourceId, version: ResolveVersion) -> Option<EncodableSourceId> {
if id.is_path() {
None
} else {
- Some(id)
+ Some(match version {
+ ResolveVersion::V4 => EncodableSourceId::new(id),
+ _ => EncodableSourceId::without_url_encoded(id),
+ })
}
}
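
`EncodableSourceId` lets V4 lockfiles percent-encode query parameters in source URLs (for example a git branch name containing `/`), while older lockfile versions keep the raw form so existing files do not churn. A small demonstration of that kind of encoding using the `url` crate (not cargo's own code path; the URL is hypothetical):

    use url::Url;

    fn main() {
        let mut source = Url::parse("https://example.com/org/repo").unwrap();
        // Appending via query_pairs_mut percent-encodes the value,
        // producing `?branch=feat%2Fone` rather than `?branch=feat/one`.
        source.query_pairs_mut().append_pair("branch", "feat/one");
        println!("{source}");
    }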
diff --git a/src/tools/cargo/src/cargo/core/resolver/features.rs b/src/tools/cargo/src/cargo/core/resolver/features.rs
index 3670e8711..4518f9fe7 100644
--- a/src/tools/cargo/src/cargo/core/resolver/features.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/features.rs
@@ -470,7 +470,7 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
deferred_weak_dependencies: HashMap::new(),
};
r.do_resolve(specs, cli_features)?;
- log::debug!("features={:#?}", r.activated_features);
+ tracing::debug!("features={:#?}", r.activated_features);
if r.opts.compare {
r.compare();
}
@@ -518,7 +518,7 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
fk: FeaturesFor,
fvs: &[FeatureValue],
) -> CargoResult<()> {
- log::trace!("activate_pkg {} {}", pkg_id.name(), fk);
+ tracing::trace!("activate_pkg {} {}", pkg_id.name(), fk);
// Add an empty entry to ensure everything is covered. This is intended for
// finding bugs where the resolver missed something it should have visited.
// Remove this in the future if `activated_features` uses an empty default.
@@ -566,7 +566,7 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
fk: FeaturesFor,
fv: &FeatureValue,
) -> CargoResult<()> {
- log::trace!("activate_fv {} {} {}", pkg_id.name(), fk, fv);
+ tracing::trace!("activate_fv {} {} {}", pkg_id.name(), fk, fv);
match fv {
FeatureValue::Feature(f) => {
self.activate_rec(pkg_id, fk, *f)?;
@@ -593,7 +593,7 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
fk: FeaturesFor,
feature_to_enable: InternedString,
) -> CargoResult<()> {
- log::trace!(
+ tracing::trace!(
"activate_rec {} {} feat={}",
pkg_id.name(),
fk,
@@ -615,7 +615,7 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
// TODO: this should only happen for optional dependencies.
// Other cases should be validated by Summary's `build_feature_map`.
// Figure out some way to validate this assumption.
- log::debug!(
+ tracing::debug!(
"pkg {:?} does not define feature {}",
pkg_id,
feature_to_enable
@@ -654,7 +654,7 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
}
if let Some(to_enable) = &to_enable {
for dep_feature in to_enable {
- log::trace!(
+ tracing::trace!(
"activate deferred {} {} -> {}/{}",
pkg_id.name(),
fk,
@@ -697,7 +697,7 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
{
// This is weak, but not yet activated. Defer in case
// something comes along later and enables it.
- log::trace!(
+ tracing::trace!(
"deferring feature {} {} -> {}/{}",
pkg_id.name(),
fk,
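
The `log` → `tracing` swaps in this file (and throughout the crate below) are mechanical: the `tracing` macros accept the same format-string call sites. A minimal sketch of the pattern, assuming the `tracing` and `tracing-subscriber` crates; the subscriber setup here is illustrative only, not cargo's actual wiring:

    use tracing::{debug, trace};

    fn activate_pkg(name: &str, features_for: &str) {
        // Same call shape as the old `log::trace!` / `log::debug!` invocations.
        trace!("activate_pkg {} {}", name, features_for);
        debug!("features={:#?}", vec!["derive", "std"]);
    }

    fn main() {
        // Emit the events somewhere visible; cargo installs its own subscriber.
        tracing_subscriber::fmt()
            .with_max_level(tracing::Level::TRACE)
            .init();
        activate_pkg("serde", "normal");
    }
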
diff --git a/src/tools/cargo/src/cargo/core/resolver/mod.rs b/src/tools/cargo/src/cargo/core/resolver/mod.rs
index b9c29fb87..e3da6fe5a 100644
--- a/src/tools/cargo/src/cargo/core/resolver/mod.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/mod.rs
@@ -63,7 +63,7 @@ use std::mem;
use std::rc::Rc;
use std::time::{Duration, Instant};
-use log::{debug, trace};
+use tracing::{debug, trace};
use crate::core::PackageIdSpec;
use crate::core::{Dependency, PackageId, Registry, Summary};
diff --git a/src/tools/cargo/src/cargo/core/resolver/resolve.rs b/src/tools/cargo/src/cargo/core/resolver/resolve.rs
index 8405a1245..18a389773 100644
--- a/src/tools/cargo/src/cargo/core/resolver/resolve.rs
+++ b/src/tools/cargo/src/cargo/core/resolver/resolve.rs
@@ -83,6 +83,8 @@ pub enum ResolveVersion {
/// Unstable. Will collect a certain number of changes and then stabilize.
///
/// Changes made:
+ ///
+ /// * SourceId URL serialization is aware of URL encoding.
V4,
}
diff --git a/src/tools/cargo/src/cargo/core/source/source_id.rs b/src/tools/cargo/src/cargo/core/source/source_id.rs
index 4064364d5..6bbc07a5d 100644
--- a/src/tools/cargo/src/cargo/core/source/source_id.rs
+++ b/src/tools/cargo/src/cargo/core/source/source_id.rs
@@ -3,7 +3,6 @@ use crate::sources::registry::CRATES_IO_HTTP_INDEX;
use crate::sources::{DirectorySource, CRATES_IO_DOMAIN, CRATES_IO_INDEX, CRATES_IO_REGISTRY};
use crate::sources::{GitSource, PathSource, RegistrySource};
use crate::util::{config, CanonicalUrl, CargoResult, Config, IntoUrl};
-use log::trace;
use serde::de;
use serde::ser;
use std::cmp::{self, Ordering};
@@ -14,6 +13,7 @@ use std::path::{Path, PathBuf};
use std::ptr;
use std::sync::Mutex;
use std::sync::OnceLock;
+use tracing::trace;
use url::Url;
static SOURCE_ID_CACHE: OnceLock<Mutex<HashSet<&'static SourceIdInner>>> = OnceLock::new();
@@ -195,6 +195,15 @@ impl SourceId {
pub fn as_url(&self) -> SourceIdAsUrl<'_> {
SourceIdAsUrl {
inner: &*self.inner,
+ encoded: false,
+ }
+ }
+
+ /// Like [`Self::as_url`] but with URL parameters encoded.
+ pub fn as_encoded_url(&self) -> SourceIdAsUrl<'_> {
+ SourceIdAsUrl {
+ inner: &*self.inner,
+ encoded: true,
}
}
@@ -566,7 +575,10 @@ impl fmt::Display for SourceId {
// Don't replace the URL display for git references,
// because those are kind of expected to be URLs.
write!(f, "{}", self.inner.url)?;
- if let Some(pretty) = reference.pretty_ref() {
+ // TODO(-Znext-lockfile-bump): set it to true when stabilizing
+ // lockfile v4, because we want Source ID serialization to be
+ // consistent with the lockfile.
+ if let Some(pretty) = reference.pretty_ref(false) {
write!(f, "?{}", pretty)?;
}
@@ -714,6 +726,7 @@ impl Ord for SourceKind {
/// A `Display`able view into a `SourceId` that will write it as a url
pub struct SourceIdAsUrl<'a> {
inner: &'a SourceIdInner,
+ encoded: bool,
}
impl<'a> fmt::Display for SourceIdAsUrl<'a> {
@@ -731,7 +744,7 @@ impl<'a> fmt::Display for SourceIdAsUrl<'a> {
..
} => {
write!(f, "git+{}", url)?;
- if let Some(pretty) = reference.pretty_ref() {
+ if let Some(pretty) = reference.pretty_ref(self.encoded) {
write!(f, "?{}", pretty)?;
}
if let Some(precise) = precise.as_ref() {
@@ -771,10 +784,13 @@ impl<'a> fmt::Display for SourceIdAsUrl<'a> {
impl GitReference {
/// Returns a `Display`able view of this git reference, or None if using
/// the head of the default branch
- pub fn pretty_ref(&self) -> Option<PrettyRef<'_>> {
+ pub fn pretty_ref(&self, url_encoded: bool) -> Option<PrettyRef<'_>> {
match self {
GitReference::DefaultBranch => None,
- _ => Some(PrettyRef { inner: self }),
+ _ => Some(PrettyRef {
+ inner: self,
+ url_encoded,
+ }),
}
}
}
@@ -782,16 +798,35 @@ impl GitReference {
/// A git reference that can be `Display`ed
pub struct PrettyRef<'a> {
inner: &'a GitReference,
+ url_encoded: bool,
}
impl<'a> fmt::Display for PrettyRef<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match *self.inner {
- GitReference::Branch(ref b) => write!(f, "branch={}", b),
- GitReference::Tag(ref s) => write!(f, "tag={}", s),
- GitReference::Rev(ref s) => write!(f, "rev={}", s),
+ let value: &str;
+ match self.inner {
+ GitReference::Branch(s) => {
+ write!(f, "branch=")?;
+ value = s;
+ }
+ GitReference::Tag(s) => {
+ write!(f, "tag=")?;
+ value = s;
+ }
+ GitReference::Rev(s) => {
+ write!(f, "rev=")?;
+ value = s;
+ }
GitReference::DefaultBranch => unreachable!(),
}
+ if self.url_encoded {
+ for value in url::form_urlencoded::byte_serialize(value.as_bytes()) {
+ write!(f, "{value}")?;
+ }
+ } else {
+ write!(f, "{value}")?;
+ }
+ Ok(())
}
}
@@ -905,6 +940,27 @@ mod tests {
assert_eq!(formatted, "sparse+https://my-crates.io/");
assert_eq!(source_id, deserialized);
}
+
+ #[test]
+ fn gitrefs_roundtrip() {
+ let base = "https://host/path".into_url().unwrap();
+ let branch = GitReference::Branch("*-._+20%30 Z/z#foo=bar&zap[]?to\\()'\"".to_string());
+ let s1 = SourceId::for_git(&base, branch).unwrap();
+ let ser1 = format!("{}", s1.as_encoded_url());
+ let s2 = SourceId::from_url(&ser1).expect("Failed to deserialize");
+ let ser2 = format!("{}", s2.as_encoded_url());
+ // Serializing twice should yield the same result
+ assert_eq!(ser1, ser2, "Serialized forms don't match");
+ // SourceIds that serialize to the same string should have the same semantics.
+ // This used to not be the case (`#` was ambiguous).
+ assert_eq!(s1, s2, "SourceId doesn't round-trip");
+ // Freeze the format to match an x-www-form-urlencoded query string
+ // https://url.spec.whatwg.org/#application/x-www-form-urlencoded
+ assert_eq!(
+ ser1,
+ "git+https://host/path?branch=*-._%2B20%2530+Z%2Fz%23foo%3Dbar%26zap%5B%5D%3Fto%5C%28%29%27%22"
+ );
+ }
}
/// Check if `url` equals to the overridden crates.io URL.
diff --git a/src/tools/cargo/src/cargo/core/workspace.rs b/src/tools/cargo/src/cargo/core/workspace.rs
index db9c18010..9ee0cbe04 100644
--- a/src/tools/cargo/src/cargo/core/workspace.rs
+++ b/src/tools/cargo/src/cargo/core/workspace.rs
@@ -7,7 +7,7 @@ use std::rc::Rc;
use anyhow::{anyhow, bail, Context as _};
use glob::glob;
use itertools::Itertools;
-use log::debug;
+use tracing::debug;
use url::Url;
use crate::core::compiler::Unit;
diff --git a/src/tools/cargo/src/cargo/lib.rs b/src/tools/cargo/src/cargo/lib.rs
index a03d51199..9f6edf80d 100644
--- a/src/tools/cargo/src/cargo/lib.rs
+++ b/src/tools/cargo/src/cargo/lib.rs
@@ -147,7 +147,7 @@
use crate::core::shell::Verbosity::Verbose;
use crate::core::Shell;
use anyhow::Error;
-use log::debug;
+use tracing::debug;
pub use crate::util::errors::{AlreadyPrintedError, InternalError, VerboseError};
pub use crate::util::{indented_lines, CargoResult, CliError, CliResult, Config};
diff --git a/src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs b/src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs
index f53a9e934..1247ceda7 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_compile/mod.rs
@@ -5,7 +5,7 @@
//! rough outline is:
//!
//! 1. Resolve the dependency graph (see [`ops::resolve`]).
-//! 2. Download any packages needed (see [`PackageSet`](crate::core::PackageSet)).
+//! 2. Download any packages needed (see [`PackageSet`]).
//! 3. Generate a list of top-level "units" of work for the targets the user
//! requested on the command-line. Each [`Unit`] corresponds to a compiler
//! invocation. This is done in this module ([`UnitGenerator::generate_root_units`]).
@@ -753,7 +753,7 @@ fn remove_duplicate_doc(
.into_iter()
.partition(|unit| cb(unit) && !root_units.contains(unit));
for unit in to_remove {
- log::debug!(
+ tracing::debug!(
"removing duplicate doc due to {} for package {} target `{}`",
reason,
unit.pkg,
diff --git a/src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs b/src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs
index 6267b08f5..fddf83f19 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_generate_lockfile.rs
@@ -6,9 +6,9 @@ use crate::ops;
use crate::util::config::Config;
use crate::util::CargoResult;
use anyhow::Context;
-use log::debug;
use std::collections::{BTreeMap, HashSet};
use termcolor::Color::{self, Cyan, Green, Red, Yellow};
+use tracing::debug;
pub struct UpdateOptions<'a> {
pub config: &'a Config,
diff --git a/src/tools/cargo/src/cargo/ops/cargo_new.rs b/src/tools/cargo/src/cargo/ops/cargo_new.rs
index b113671b0..0809cefc3 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_new.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_new.rs
@@ -879,7 +879,7 @@ mod tests {
.arg(&path_of_source_file)
.exec_with_output()
{
- log::warn!("failed to call rustfmt: {:#}", e);
+ tracing::warn!("failed to call rustfmt: {:#}", e);
}
}
}
diff --git a/src/tools/cargo/src/cargo/ops/cargo_package.rs b/src/tools/cargo/src/cargo/ops/cargo_package.rs
index a322afbb3..93469607b 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_package.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_package.rs
@@ -22,9 +22,10 @@ use anyhow::Context as _;
use cargo_util::paths;
use flate2::read::GzDecoder;
use flate2::{Compression, GzBuilder};
-use log::debug;
use serde::Serialize;
use tar::{Archive, Builder, EntryType, Header, HeaderMode};
+use tracing::debug;
+use unicase::Ascii as UncasedAscii;
pub struct PackageOpts<'cfg> {
pub config: &'cfg Config,
@@ -227,58 +228,84 @@ fn build_ar_list(
src_files: Vec<PathBuf>,
vcs_info: Option<VcsInfo>,
) -> CargoResult<Vec<ArchiveFile>> {
- let mut result = Vec::new();
+ let mut result = HashMap::new();
let root = pkg.root();
- for src_file in src_files {
- let rel_path = src_file.strip_prefix(&root)?.to_path_buf();
- check_filename(&rel_path, &mut ws.config().shell())?;
- let rel_str = rel_path
- .to_str()
- .ok_or_else(|| {
- anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
- })?
- .to_string();
+
+ for src_file in &src_files {
+ let rel_path = src_file.strip_prefix(&root)?;
+ check_filename(rel_path, &mut ws.config().shell())?;
+ let rel_str = rel_path.to_str().ok_or_else(|| {
+ anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
+ })?;
match rel_str.as_ref() {
- "Cargo.toml" => {
- result.push(ArchiveFile {
- rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
- rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
- contents: FileContents::OnDisk(src_file),
- });
- result.push(ArchiveFile {
- rel_path,
- rel_str,
- contents: FileContents::Generated(GeneratedFile::Manifest),
- });
- }
"Cargo.lock" => continue,
VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
"invalid inclusion of reserved file name {} in package source",
rel_str
),
_ => {
- result.push(ArchiveFile {
- rel_path,
- rel_str,
- contents: FileContents::OnDisk(src_file),
- });
+ result
+ .entry(UncasedAscii::new(rel_str))
+ .or_insert_with(Vec::new)
+ .push(ArchiveFile {
+ rel_path: rel_path.to_owned(),
+ rel_str: rel_str.to_owned(),
+ contents: FileContents::OnDisk(src_file.clone()),
+ });
}
}
}
+
+ // Ensure we normalize for case-insensitive filesystems (like on Windows) by removing the
+ // existing entry, regardless of case, and adding it back with the correct case
+ if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
+ result
+ .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
+ .or_insert_with(Vec::new)
+ .push(ArchiveFile {
+ rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
+ rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
+ contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
+ });
+ result
+ .entry(UncasedAscii::new("Cargo.toml"))
+ .or_insert_with(Vec::new)
+ .push(ArchiveFile {
+ rel_path: PathBuf::from("Cargo.toml"),
+ rel_str: "Cargo.toml".to_string(),
+ contents: FileContents::Generated(GeneratedFile::Manifest),
+ });
+ } else {
+ ws.config().shell().warn(&format!(
+ "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
+ pkg.name()
+ ))?;
+ }
+
if pkg.include_lockfile() {
- result.push(ArchiveFile {
- rel_path: PathBuf::from("Cargo.lock"),
- rel_str: "Cargo.lock".to_string(),
- contents: FileContents::Generated(GeneratedFile::Lockfile),
- });
+ let rel_str = "Cargo.lock";
+ result
+ .entry(UncasedAscii::new(rel_str))
+ .or_insert_with(Vec::new)
+ .push(ArchiveFile {
+ rel_path: PathBuf::from(rel_str),
+ rel_str: rel_str.to_string(),
+ contents: FileContents::Generated(GeneratedFile::Lockfile),
+ });
}
if let Some(vcs_info) = vcs_info {
- result.push(ArchiveFile {
- rel_path: PathBuf::from(VCS_INFO_FILE),
- rel_str: VCS_INFO_FILE.to_string(),
- contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
- });
- }
+ let rel_str = VCS_INFO_FILE;
+ result
+ .entry(UncasedAscii::new(rel_str))
+ .or_insert_with(Vec::new)
+ .push(ArchiveFile {
+ rel_path: PathBuf::from(rel_str),
+ rel_str: rel_str.to_string(),
+ contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
+ });
+ }
+
+ let mut result = result.into_values().flatten().collect();
if let Some(license_file) = &pkg.manifest().metadata().license_file {
let license_path = Path::new(license_file);
let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
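
Keying the candidate archive entries by an ASCII-case-insensitive file name is what lets the `Cargo.toml`/`ORIGINAL_MANIFEST_FILE` swap above work even when the on-disk manifest is spelled, say, `cargo.toml`. A reduced sketch of the pattern, assuming the `unicase` crate and simplifying the map values to plain strings:

    use std::collections::HashMap;
    use unicase::Ascii;

    fn main() {
        let mut files: HashMap<Ascii<&str>, Vec<String>> = HashMap::new();
        for name in ["cargo.toml", "Cargo.toml", "src/lib.rs"] {
            files
                .entry(Ascii::new(name))
                .or_insert_with(Vec::new)
                .push(name.to_string());
        }
        // Both spellings of the manifest land in the same bucket, so the entry can be
        // removed and re-added wholesale regardless of the case the filesystem reported.
        assert_eq!(files.len(), 2);
        assert!(files.remove(&Ascii::new("CARGO.TOML")).is_some());
    }
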
diff --git a/src/tools/cargo/src/cargo/ops/cargo_read_manifest.rs b/src/tools/cargo/src/cargo/ops/cargo_read_manifest.rs
index 2dfe90086..d9daea5da 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_read_manifest.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_read_manifest.rs
@@ -9,7 +9,7 @@ use crate::util::important_paths::find_project_manifest_exact;
use crate::util::toml::read_manifest;
use crate::util::Config;
use cargo_util::paths;
-use log::{info, trace};
+use tracing::{info, trace};
pub fn read_package(
path: &Path,
diff --git a/src/tools/cargo/src/cargo/ops/cargo_run.rs b/src/tools/cargo/src/cargo/ops/cargo_run.rs
index 53916715a..adf144ac2 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_run.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_run.rs
@@ -100,6 +100,10 @@ pub fn run(
// by `compile.target_process` (the package's root directory)
process.args(args).cwd(config.cwd());
+ if config.extra_verbose() {
+ process.display_env_vars();
+ }
+
config.shell().status("Running", process.to_string())?;
process.exec_replace()
diff --git a/src/tools/cargo/src/cargo/ops/cargo_test.rs b/src/tools/cargo/src/cargo/ops/cargo_test.rs
index 1ddf7755f..0d0bd800f 100644
--- a/src/tools/cargo/src/cargo/ops/cargo_test.rs
+++ b/src/tools/cargo/src/cargo/ops/cargo_test.rs
@@ -126,7 +126,7 @@ fn run_unit_tests(
script_meta,
} in compilation.tests.iter()
{
- let (exe_display, cmd) = cmd_builds(
+ let (exe_display, mut cmd) = cmd_builds(
config,
cwd,
unit,
@@ -136,6 +136,11 @@ fn run_unit_tests(
compilation,
"unittests",
)?;
+
+ if config.extra_verbose() {
+ cmd.display_env_vars();
+ }
+
config
.shell()
.concise(|shell| shell.status("Running", &exe_display))?;
@@ -149,7 +154,7 @@ fn run_unit_tests(
unit: unit.clone(),
kind: test_kind,
};
- report_test_error(ws, &options.compile_opts, &unit_err, e);
+ report_test_error(ws, test_args, &options.compile_opts, &unit_err, e);
errors.push(unit_err);
if !options.no_fail_fast {
return Err(CliError::code(code));
@@ -260,22 +265,29 @@ fn run_doc_tests(
p.arg("--test-args").arg("--quiet");
}
+ p.args(unit.pkg.manifest().lint_rustflags());
+
p.args(args);
if *unstable_opts {
p.arg("-Zunstable-options");
}
+ if config.extra_verbose() {
+ p.display_env_vars();
+ }
+
config
.shell()
.verbose(|shell| shell.status("Running", p.to_string()))?;
+
if let Err(e) = p.exec() {
let code = fail_fast_code(&e);
let unit_err = UnitTestError {
unit: unit.clone(),
kind: TestKind::Doctest,
};
- report_test_error(ws, &options.compile_opts, &unit_err, e);
+ report_test_error(ws, test_args, &options.compile_opts, &unit_err, e);
errors.push(unit_err);
if !options.no_fail_fast {
return Err(CliError::code(code));
@@ -407,6 +419,7 @@ fn no_fail_fast_err(
/// Displays an error on the console about a test failure.
fn report_test_error(
ws: &Workspace<'_>,
+ test_args: &[&str],
opts: &ops::CompileOptions,
unit_err: &UnitTestError,
test_error: anyhow::Error,
@@ -420,13 +433,23 @@ fn report_test_error(
let mut err = format_err!("{}, to rerun pass `{}`", which, unit_err.cli_args(ws, opts));
// Don't show "process didn't exit successfully" for simple errors.
// libtest exits with 101 for normal errors.
- let is_simple = test_error
+ let (is_simple, executed) = test_error
.downcast_ref::<ProcessError>()
.and_then(|proc_err| proc_err.code)
- .map_or(false, |code| code == 101);
+ .map_or((false, false), |code| (code == 101, true));
+
if !is_simple {
err = test_error.context(err);
}
crate::display_error(&err, &mut ws.config().shell());
+
+ let harness: bool = unit_err.unit.target.harness();
+ let nocapture: bool = test_args.contains(&"--nocapture");
+
+ if !is_simple && executed && harness && !nocapture {
+ drop(ws.config().shell().note(
+ "test exited abnormally; to see the full output pass --nocapture to the harness.",
+ ));
+ }
}
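
The note above is only printed when the test binary actually produced an exit code, that code is not libtest's ordinary `101`, the target uses the default harness, and `--nocapture` was not already passed. A reduced sketch of just the exit-code classification, with a local stand-in for `cargo_util::ProcessError`:

    use anyhow::anyhow;

    // Local stand-in for `cargo_util::ProcessError`; only the exit code matters here.
    #[derive(Debug)]
    struct ProcessError {
        code: Option<i32>,
    }

    impl std::fmt::Display for ProcessError {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            write!(f, "process exited with code {:?}", self.code)
        }
    }

    impl std::error::Error for ProcessError {}

    /// Returns `(is_simple, executed)`: 101 is libtest's normal failure code.
    fn classify(test_error: &anyhow::Error) -> (bool, bool) {
        test_error
            .downcast_ref::<ProcessError>()
            .and_then(|proc_err| proc_err.code)
            .map_or((false, false), |code| (code == 101, true))
    }

    fn main() {
        let crashed = anyhow!(ProcessError { code: Some(134) });
        assert_eq!(classify(&crashed), (false, true)); // abnormal exit: suggest --nocapture
        let failed = anyhow!(ProcessError { code: Some(101) });
        assert_eq!(classify(&failed), (true, true)); // ordinary assertion failure
    }
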
diff --git a/src/tools/cargo/src/cargo/ops/fix.rs b/src/tools/cargo/src/cargo/ops/fix.rs
index be24967f8..0e678d61c 100644
--- a/src/tools/cargo/src/cargo/ops/fix.rs
+++ b/src/tools/cargo/src/cargo/ops/fix.rs
@@ -46,10 +46,10 @@ use std::{env, fs, str};
use anyhow::{bail, Context as _};
use cargo_util::{exit_status_to_string, is_simple_exit_code, paths, ProcessBuilder};
-use log::{debug, trace, warn};
use rustfix::diagnostics::Diagnostic;
use rustfix::{self, CodeFix};
use semver::Version;
+use tracing::{debug, trace, warn};
use crate::core::compiler::RustcTargetData;
use crate::core::resolver::features::{DiffMap, FeatureOpts, FeatureResolver, FeaturesFor};
diff --git a/src/tools/cargo/src/cargo/ops/registry/login.rs b/src/tools/cargo/src/cargo/ops/registry/login.rs
index 1e2b3a87b..e52373734 100644
--- a/src/tools/cargo/src/cargo/ops/registry/login.rs
+++ b/src/tools/cargo/src/cargo/ops/registry/login.rs
@@ -5,40 +5,27 @@
//!
//! [1]: https://doc.rust-lang.org/nightly/cargo/reference/registry-web-api.html#login
-use std::io;
-use std::io::BufRead;
+use std::io::IsTerminal;
-use anyhow::anyhow;
-use anyhow::bail;
-use anyhow::Context as _;
-use pasetors::keys::AsymmetricKeyPair;
-use pasetors::keys::Generate as _;
-use pasetors::paserk::FormatAsPaserk;
-
-use crate::drop_println;
-use crate::ops::RegistryCredentialConfig;
-use crate::sources::CRATES_IO_DOMAIN;
use crate::util::auth;
-use crate::util::auth::paserk_public_from_paserk_secret;
use crate::util::auth::AuthorizationError;
-use crate::util::auth::Secret;
use crate::CargoResult;
use crate::Config;
+use cargo_credential::LoginOptions;
+use cargo_credential::Secret;
use super::get_source_id;
+use super::registry;
pub fn registry_login(
config: &Config,
- token: Option<Secret<&str>>,
+ token_from_cmdline: Option<Secret<&str>>,
reg: Option<&str>,
- generate_keypair: bool,
- secret_key_required: bool,
- key_subject: Option<&str>,
+ args: &[&str],
) -> CargoResult<()> {
let source_ids = get_source_id(config, None, reg)?;
- let reg_cfg = auth::registry_credential_config(config, &source_ids.original)?;
- let login_url = match super::registry(config, token.clone(), None, reg, false, None) {
+ let login_url = match registry(config, token_from_cmdline.clone(), None, reg, false, None) {
Ok((registry, _)) => Some(format!("{}/me", registry.host())),
Err(e) if e.is::<AuthorizationError>() => e
.downcast::<AuthorizationError>()
@@ -47,114 +34,23 @@ pub fn registry_login(
.map(|u| u.to_string()),
Err(e) => return Err(e),
};
- let new_token;
- if generate_keypair || secret_key_required || key_subject.is_some() {
- if !config.cli_unstable().registry_auth {
- let flag = if generate_keypair {
- "generate-keypair"
- } else if secret_key_required {
- "secret-key"
- } else if key_subject.is_some() {
- "key-subject"
- } else {
- unreachable!("how did we get here");
- };
- bail!(
- "the `{flag}` flag is unstable, pass `-Z registry-auth` to enable it\n\
- See https://github.com/rust-lang/cargo/issues/10519 for more \
- information about the `{flag}` flag."
- );
- }
- assert!(token.is_none());
- // we are dealing with asymmetric tokens
- let (old_secret_key, old_key_subject) = match &reg_cfg {
- RegistryCredentialConfig::AsymmetricKey((old_secret_key, old_key_subject)) => {
- (Some(old_secret_key), old_key_subject.clone())
- }
- _ => (None, None),
- };
- let secret_key: Secret<String>;
- if generate_keypair {
- assert!(!secret_key_required);
- let kp = AsymmetricKeyPair::<pasetors::version3::V3>::generate().unwrap();
- secret_key = Secret::default().map(|mut key| {
- FormatAsPaserk::fmt(&kp.secret, &mut key).unwrap();
- key
- });
- } else if secret_key_required {
- assert!(!generate_keypair);
- drop_println!(config, "please paste the API secret key below");
- secret_key = Secret::default()
- .map(|mut line| {
- let input = io::stdin();
- input
- .lock()
- .read_line(&mut line)
- .with_context(|| "failed to read stdin")
- .map(|_| line.trim().to_string())
- })
- .transpose()?;
- } else {
- secret_key = old_secret_key
- .cloned()
- .ok_or_else(|| anyhow!("need a secret_key to set a key_subject"))?;
- }
- if let Some(p) = paserk_public_from_paserk_secret(secret_key.as_deref()) {
- drop_println!(config, "{}", &p);
- } else {
- bail!("not a validly formatted PASERK secret key");
- }
- new_token = RegistryCredentialConfig::AsymmetricKey((
- secret_key,
- match key_subject {
- Some(key_subject) => Some(key_subject.to_string()),
- None => old_key_subject,
- },
- ));
- } else {
- new_token = RegistryCredentialConfig::Token(match token {
- Some(token) => token.owned(),
- None => {
- if let Some(login_url) = login_url {
- drop_println!(
- config,
- "please paste the token found on {} below",
- login_url
- )
- } else {
- drop_println!(
- config,
- "please paste the token for {} below",
- source_ids.original.display_registry_name()
- )
- }
- let mut line = String::new();
- let input = io::stdin();
- input
- .lock()
- .read_line(&mut line)
- .with_context(|| "failed to read stdin")?;
- // Automatically remove `cargo login` from an inputted token to
- // allow direct pastes from `registry.host()`/me.
- Secret::from(line.replace("cargo login", "").trim().to_string())
+ let mut token_from_stdin = None;
+ let token = token_from_cmdline.or_else(|| {
+ if !std::io::stdin().is_terminal() {
+ let token = std::io::read_to_string(std::io::stdin()).unwrap_or_default();
+ if !token.is_empty() {
+ token_from_stdin = Some(token);
}
- });
-
- if let Some(tok) = new_token.as_token() {
- crates_io::check_token(tok.as_ref().expose())?;
}
- }
- if &reg_cfg == &new_token {
- config.shell().status("Login", "already logged in")?;
- return Ok(());
- }
+ token_from_stdin.as_deref().map(Secret::from)
+ });
- auth::login(config, &source_ids.original, new_token)?;
+ let options = LoginOptions {
+ token,
+ login_url: login_url.as_deref(),
+ };
- config.shell().status(
- "Login",
- format!("token for `{}` saved", reg.unwrap_or(CRATES_IO_DOMAIN)),
- )?;
+ auth::login(config, &source_ids.original, options, args)?;
Ok(())
}
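
Interactive prompting now lives in the credential providers; on cargo's side a token is only picked up non-interactively, when something is piped into stdin (as in `echo $TOKEN | cargo login`). A minimal sketch of that check using only the standard library:

    use std::io::IsTerminal;

    fn token_from_stdin() -> Option<String> {
        if std::io::stdin().is_terminal() {
            // Attached to a TTY: leave prompting to the credential provider.
            return None;
        }
        let token = std::io::read_to_string(std::io::stdin()).unwrap_or_default();
        let token = token.trim().to_string();
        (!token.is_empty()).then_some(token)
    }

    fn main() {
        match token_from_stdin() {
            Some(token) => println!("using piped token ({} bytes)", token.len()),
            None => println!("no piped token; deferring to the credential provider"),
        }
    }
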
diff --git a/src/tools/cargo/src/cargo/ops/registry/logout.rs b/src/tools/cargo/src/cargo/ops/registry/logout.rs
index 59f2d9261..d1f080bae 100644
--- a/src/tools/cargo/src/cargo/ops/registry/logout.rs
+++ b/src/tools/cargo/src/cargo/ops/registry/logout.rs
@@ -11,32 +11,6 @@ use super::get_source_id;
pub fn registry_logout(config: &Config, reg: Option<&str>) -> CargoResult<()> {
let source_ids = get_source_id(config, None, reg)?;
- let reg_cfg = auth::registry_credential_config(config, &source_ids.original)?;
- let reg_name = source_ids.original.display_registry_name();
- if reg_cfg.is_none() {
- config
- .shell()
- .status("Logout", format!("not currently logged in to `{reg_name}`"))?;
- return Ok(());
- }
auth::logout(config, &source_ids.original)?;
- config.shell().status(
- "Logout",
- format!("token for `{reg_name}` has been removed from local storage"),
- )?;
- let location = if source_ids.original.is_crates_io() {
- "<https://crates.io/me>".to_string()
- } else {
- // The URL for the source requires network access to load the config.
- // That could be a fairly heavy operation to perform just to provide a
- // help message, so for now this just provides some generic text.
- // Perhaps in the future this could have an API to fetch the config if
- // it is cached, but avoid network access otherwise?
- format!("the `{reg_name}` website")
- };
- config.shell().note(format!(
- "This does not revoke the token on the registry server.\n \
- If you need to revoke the token, visit {location} and follow the instructions there."
- ))?;
Ok(())
}
diff --git a/src/tools/cargo/src/cargo/ops/registry/mod.rs b/src/tools/cargo/src/cargo/ops/registry/mod.rs
index ecb610ddd..94ef1ead2 100644
--- a/src/tools/cargo/src/cargo/ops/registry/mod.rs
+++ b/src/tools/cargo/src/cargo/ops/registry/mod.rs
@@ -10,18 +10,18 @@ mod search;
mod yank;
use std::collections::HashSet;
-use std::path::PathBuf;
use std::str;
use std::task::Poll;
use anyhow::{bail, format_err, Context as _};
+use cargo_credential::{Operation, Secret};
use crates_io::{self, Registry};
use crate::core::source::Source;
use crate::core::SourceId;
use crate::sources::{RegistrySource, SourceConfigMap};
-use crate::util::auth::{self, Secret};
-use crate::util::config::Config;
+use crate::util::auth;
+use crate::util::config::{Config, PathAndArgs};
use crate::util::errors::CargoResult;
use crate::util::network::http::http_handle;
use crate::util::IntoUrl;
@@ -44,7 +44,7 @@ pub enum RegistryCredentialConfig {
/// The authentication token.
Token(Secret<String>),
/// Process used for fetching a token.
- Process((PathBuf, Vec<String>)),
+ Process(Vec<PathAndArgs>),
/// Secret Key and subject for Asymmetric tokens.
AsymmetricKey((Secret<String>, Option<String>)),
}
@@ -75,7 +75,7 @@ impl RegistryCredentialConfig {
None
}
}
- pub fn as_process(&self) -> Option<&(PathBuf, Vec<String>)> {
+ pub fn as_process(&self) -> Option<&Vec<PathAndArgs>> {
if let Self::Process(v) = self {
Some(v)
} else {
@@ -106,7 +106,7 @@ fn registry(
index: Option<&str>,
registry: Option<&str>,
force_update: bool,
- token_required: Option<auth::Mutation<'_>>,
+ token_required: Option<Operation<'_>>,
) -> CargoResult<(Registry, RegistrySourceIds)> {
let source_ids = get_source_id(config, index, registry)?;
@@ -114,7 +114,7 @@ fn registry(
bail!("command-line argument --index requires --token to be specified");
}
if let Some(token) = token_from_cmdline {
- auth::cache_token(config, &source_ids.original, token);
+ auth::cache_token_from_commandline(config, &source_ids.original, token);
}
let cfg = {
@@ -138,11 +138,13 @@ fn registry(
.api
.ok_or_else(|| format_err!("{} does not support API commands", source_ids.replacement))?;
let token = if token_required.is_some() || cfg.auth_required {
+ let operation = token_required.unwrap_or(Operation::Read);
Some(auth::auth_token(
config,
&source_ids.original,
None,
- token_required,
+ operation,
+ vec![],
)?)
} else {
None
diff --git a/src/tools/cargo/src/cargo/ops/registry/owner.rs b/src/tools/cargo/src/cargo/ops/registry/owner.rs
index e53e07cb8..e29c6400b 100644
--- a/src/tools/cargo/src/cargo/ops/registry/owner.rs
+++ b/src/tools/cargo/src/cargo/ops/registry/owner.rs
@@ -3,12 +3,12 @@
//! [1]: https://doc.rust-lang.org/nightly/cargo/reference/registry-web-api.html#owners
use anyhow::Context as _;
+use cargo_credential::Operation;
+use cargo_credential::Secret;
use crate::core::Workspace;
use crate::drop_print;
use crate::drop_println;
-use crate::util::auth;
-use crate::util::auth::Secret;
use crate::util::important_paths::find_root_manifest_for_wd;
use crate::CargoResult;
use crate::Config;
@@ -33,7 +33,7 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {
}
};
- let mutation = auth::Mutation::Owners { name: &name };
+ let operation = Operation::Owners { name: &name };
let (mut registry, _) = super::registry(
config,
@@ -41,7 +41,7 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {
opts.index.as_deref(),
opts.registry.as_deref(),
true,
- Some(mutation),
+ Some(operation),
)?;
if let Some(ref v) = opts.to_add {
diff --git a/src/tools/cargo/src/cargo/ops/registry/publish.rs b/src/tools/cargo/src/cargo/ops/registry/publish.rs
index 7f4fbbae2..40ca9fd16 100644
--- a/src/tools/cargo/src/cargo/ops/registry/publish.rs
+++ b/src/tools/cargo/src/cargo/ops/registry/publish.rs
@@ -9,6 +9,8 @@ use std::time::Duration;
use anyhow::bail;
use anyhow::Context as _;
+use cargo_credential::Operation;
+use cargo_credential::Secret;
use cargo_util::paths;
use crates_io::NewCrate;
use crates_io::NewCrateDependency;
@@ -28,7 +30,6 @@ use crate::ops::Packages;
use crate::sources::SourceConfigMap;
use crate::sources::CRATES_IO_REGISTRY;
use crate::util::auth;
-use crate::util::auth::Secret;
use crate::util::config::JobsConfig;
use crate::util::Progress;
use crate::util::ProgressStyle;
@@ -113,7 +114,7 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
// This is only used to confirm that we can create a token before we build the package.
// This causes the credential provider to be called an extra time, but keeps the same order of errors.
let ver = pkg.version().to_string();
- let mutation = auth::Mutation::PrePublish;
+ let operation = Operation::Read;
let (mut registry, reg_ids) = super::registry(
opts.config,
@@ -121,7 +122,7 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
opts.index.as_deref(),
publish_registry.as_deref(),
true,
- Some(mutation).filter(|_| !opts.dry_run),
+ Some(operation).filter(|_| !opts.dry_run),
)?;
verify_dependencies(pkg, &registry, reg_ids.original)?;
@@ -149,16 +150,17 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
let hash = cargo_util::Sha256::new()
.update_file(tarball.file())?
.finish_hex();
- let mutation = Some(auth::Mutation::Publish {
+ let operation = Operation::Publish {
name: pkg.name().as_str(),
vers: &ver,
cksum: &hash,
- });
+ };
registry.set_token(Some(auth::auth_token(
&opts.config,
&reg_ids.original,
None,
- mutation,
+ operation,
+ vec![],
)?));
}
diff --git a/src/tools/cargo/src/cargo/ops/registry/yank.rs b/src/tools/cargo/src/cargo/ops/registry/yank.rs
index 7f087570a..8a961b990 100644
--- a/src/tools/cargo/src/cargo/ops/registry/yank.rs
+++ b/src/tools/cargo/src/cargo/ops/registry/yank.rs
@@ -5,10 +5,10 @@
use anyhow::bail;
use anyhow::Context as _;
+use cargo_credential::Operation;
+use cargo_credential::Secret;
use crate::core::Workspace;
-use crate::util::auth;
-use crate::util::auth::Secret;
use crate::util::config::Config;
use crate::util::errors::CargoResult;
use crate::util::important_paths::find_root_manifest_for_wd;
@@ -36,12 +36,12 @@ pub fn yank(
};
let message = if undo {
- auth::Mutation::Unyank {
+ Operation::Unyank {
name: &name,
vers: &version,
}
} else {
- auth::Mutation::Yank {
+ Operation::Yank {
name: &name,
vers: &version,
}
diff --git a/src/tools/cargo/src/cargo/ops/resolve.rs b/src/tools/cargo/src/cargo/ops/resolve.rs
index ea5eded4a..6246311a5 100644
--- a/src/tools/cargo/src/cargo/ops/resolve.rs
+++ b/src/tools/cargo/src/cargo/ops/resolve.rs
@@ -71,8 +71,8 @@ use crate::sources::PathSource;
use crate::util::errors::CargoResult;
use crate::util::{profile, CanonicalUrl};
use anyhow::Context as _;
-use log::{debug, trace};
use std::collections::{HashMap, HashSet};
+use tracing::{debug, trace};
/// Result for `resolve_ws_with_opts`.
pub struct WorkspaceResolve<'cfg> {
diff --git a/src/tools/cargo/src/cargo/ops/tree/graph.rs b/src/tools/cargo/src/cargo/ops/tree/graph.rs
index d01d07f1a..f0dad4e5d 100644
--- a/src/tools/cargo/src/cargo/ops/tree/graph.rs
+++ b/src/tools/cargo/src/cargo/ops/tree/graph.rs
@@ -642,7 +642,7 @@ fn add_feature_rec(
let dep_indexes = match graph.dep_name_map[&package_index].get(dep_name) {
Some(indexes) => indexes.clone(),
None => {
- log::debug!(
+ tracing::debug!(
"enabling feature {} on {}, found {}/{}, \
dep appears to not be enabled",
feature_name,
diff --git a/src/tools/cargo/src/cargo/sources/config.rs b/src/tools/cargo/src/cargo/sources/config.rs
index 4097567bb..c51c1f009 100644
--- a/src/tools/cargo/src/cargo/sources/config.rs
+++ b/src/tools/cargo/src/cargo/sources/config.rs
@@ -10,8 +10,8 @@ use crate::util::config::{self, ConfigRelativePath, OptValue};
use crate::util::errors::CargoResult;
use crate::util::{Config, IntoUrl};
use anyhow::{bail, Context as _};
-use log::debug;
use std::collections::{HashMap, HashSet};
+use tracing::debug;
use url::Url;
/// Represents the entire [`[source]` replacement table][1] in Cargo configuration.
diff --git a/src/tools/cargo/src/cargo/sources/git/known_hosts.rs b/src/tools/cargo/src/cargo/sources/git/known_hosts.rs
index 7b013f99c..0b0dd3208 100644
--- a/src/tools/cargo/src/cargo/sources/git/known_hosts.rs
+++ b/src/tools/cargo/src/cargo/sources/git/known_hosts.rs
@@ -141,7 +141,7 @@ pub fn certificate_check(
let Some(host_key) = cert.as_hostkey() else {
// Return passthrough for TLS X509 certificates to use whatever validation
// was done in git2.
- return Ok(CertificateCheckStatus::CertificatePassthrough)
+ return Ok(CertificateCheckStatus::CertificatePassthrough);
};
// If a nonstandard port is in use, check for that first.
// The fallback to check without a port is handled in the HostKeyNotFound handler.
@@ -342,7 +342,7 @@ fn check_ssh_known_hosts(
};
match parse_known_hosts_line(&line_value.val, location) {
Some(known_host) => known_hosts.push(known_host),
- None => log::warn!(
+ None => tracing::warn!(
"failed to parse known host {} from {}",
line_value.val,
line_value.definition
@@ -611,10 +611,18 @@ impl KnownHost {
}
fn hashed_hostname_matches(host: &str, hashed: &str) -> bool {
- let Some((b64_salt, b64_host)) = hashed.split_once('|') else { return false; };
- let Ok(salt) = STANDARD.decode(b64_salt) else { return false; };
- let Ok(hashed_host) = STANDARD.decode(b64_host) else { return false; };
- let Ok(mut mac) = hmac::Hmac::<sha1::Sha1>::new_from_slice(&salt) else { return false; };
+ let Some((b64_salt, b64_host)) = hashed.split_once('|') else {
+ return false;
+ };
+ let Ok(salt) = STANDARD.decode(b64_salt) else {
+ return false;
+ };
+ let Ok(hashed_host) = STANDARD.decode(b64_host) else {
+ return false;
+ };
+ let Ok(mut mac) = hmac::Hmac::<sha1::Sha1>::new_from_slice(&salt) else {
+ return false;
+ };
mac.update(host.as_bytes());
let result = mac.finalize().into_bytes();
hashed_host == &result[..]
diff --git a/src/tools/cargo/src/cargo/sources/git/oxide.rs b/src/tools/cargo/src/cargo/sources/git/oxide.rs
index e86c63e8e..ec4fcecdd 100644
--- a/src/tools/cargo/src/cargo/sources/git/oxide.rs
+++ b/src/tools/cargo/src/cargo/sources/git/oxide.rs
@@ -6,12 +6,12 @@ use crate::util::{human_readable_bytes, network, MetricsCounter, Progress};
use crate::{CargoResult, Config};
use cargo_util::paths;
use gix::bstr::{BString, ByteSlice};
-use log::debug;
use std::cell::RefCell;
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Weak};
use std::time::{Duration, Instant};
+use tracing::debug;
/// For the time being, `repo_path` makes it easy to instantiate a gitoxide repo just for fetching.
/// In future this may change to be the gitoxide repository itself.
diff --git a/src/tools/cargo/src/cargo/sources/git/source.rs b/src/tools/cargo/src/cargo/sources/git/source.rs
index b021d23a0..10796562d 100644
--- a/src/tools/cargo/src/cargo/sources/git/source.rs
+++ b/src/tools/cargo/src/cargo/sources/git/source.rs
@@ -10,9 +10,9 @@ use crate::util::hex::short_hash;
use crate::util::Config;
use anyhow::Context;
use cargo_util::paths::exclude_from_backups_and_indexing;
-use log::trace;
use std::fmt::{self, Debug, Formatter};
use std::task::Poll;
+use tracing::trace;
use url::Url;
/// `GitSource` contains one or more packages gathering from a Git repository.
@@ -164,7 +164,10 @@ impl<'cfg> Debug for GitSource<'cfg> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "git repo at {}", self.remote.url())?;
- match self.manifest_reference.pretty_ref() {
+ // TODO(-Znext-lockfile-bump): set it to true when stabilizing
+ // lockfile v4, because we want Source ID serialization to be
+ // consistent with the lockfile.
+ match self.manifest_reference.pretty_ref(false) {
Some(s) => write!(f, " ({})", s),
None => Ok(()),
}
diff --git a/src/tools/cargo/src/cargo/sources/git/utils.rs b/src/tools/cargo/src/cargo/sources/git/utils.rs
index 0c7ce8b64..093631091 100644
--- a/src/tools/cargo/src/cargo/sources/git/utils.rs
+++ b/src/tools/cargo/src/cargo/sources/git/utils.rs
@@ -11,7 +11,6 @@ use anyhow::{anyhow, Context as _};
use cargo_util::{paths, ProcessBuilder};
use curl::easy::List;
use git2::{self, ErrorClass, ObjectType, Oid};
-use log::{debug, info};
use serde::ser;
use serde::Serialize;
use std::borrow::Cow;
@@ -21,6 +20,7 @@ use std::process::Command;
use std::str;
use std::sync::atomic::{AtomicBool, Ordering};
use std::time::{Duration, Instant};
+use tracing::{debug, info};
use url::Url;
/// A file indicates that if present, `git reset` has been done and a repo
@@ -1316,7 +1316,7 @@ fn clean_repo_temp_files(repo: &git2::Repository) {
let pattern = match path.to_str() {
Some(p) => p,
None => {
- log::warn!("cannot convert {path:?} to a string");
+ tracing::warn!("cannot convert {path:?} to a string");
return;
}
};
@@ -1327,8 +1327,10 @@ fn clean_repo_temp_files(repo: &git2::Repository) {
for path in paths {
if let Ok(path) = path {
match paths::remove_file(&path) {
- Ok(_) => log::debug!("removed stale temp git file {path:?}"),
- Err(e) => log::warn!("failed to remove {path:?} while cleaning temp files: {e}"),
+ Ok(_) => tracing::debug!("removed stale temp git file {path:?}"),
+ Err(e) => {
+ tracing::warn!("failed to remove {path:?} while cleaning temp files: {e}")
+ }
}
}
}
diff --git a/src/tools/cargo/src/cargo/sources/path.rs b/src/tools/cargo/src/cargo/sources/path.rs
index bb40ec9b1..1d8ffc35c 100644
--- a/src/tools/cargo/src/cargo/sources/path.rs
+++ b/src/tools/cargo/src/cargo/sources/path.rs
@@ -11,7 +11,7 @@ use anyhow::Context as _;
use cargo_util::paths;
use filetime::FileTime;
use ignore::gitignore::GitignoreBuilder;
-use log::{trace, warn};
+use tracing::{trace, warn};
use walkdir::WalkDir;
/// A source represents one or multiple packages gathering from a given root
@@ -203,7 +203,7 @@ impl<'cfg> PathSource<'cfg> {
let repo = match git2::Repository::discover(root) {
Ok(repo) => repo,
Err(e) => {
- log::debug!(
+ tracing::debug!(
"could not discover git repo at or above {}: {}",
root.display(),
e
@@ -223,7 +223,7 @@ impl<'cfg> PathSource<'cfg> {
let repo_relative_path = match paths::strip_prefix_canonical(root, repo_root) {
Ok(p) => p,
Err(e) => {
- log::warn!(
+ tracing::warn!(
"cannot determine if path `{:?}` is in git repo `{:?}`: {:?}",
root,
repo_root,
diff --git a/src/tools/cargo/src/cargo/sources/registry/download.rs b/src/tools/cargo/src/cargo/sources/registry/download.rs
index a85d87177..08940b3a1 100644
--- a/src/tools/cargo/src/cargo/sources/registry/download.rs
+++ b/src/tools/cargo/src/cargo/sources/registry/download.rs
@@ -4,6 +4,7 @@
//! [`RemoteRegistry`]: super::remote::RemoteRegistry
use anyhow::Context;
+use cargo_credential::Operation;
use cargo_util::registry::make_dep_path;
use cargo_util::Sha256;
@@ -78,7 +79,13 @@ pub(super) fn download(
}
let authorization = if registry_config.auth_required {
- Some(auth::auth_token(config, &pkg.source_id(), None, None)?)
+ Some(auth::auth_token(
+ config,
+ &pkg.source_id(),
+ None,
+ Operation::Read,
+ vec![],
+ )?)
} else {
None
};
diff --git a/src/tools/cargo/src/cargo/sources/registry/http_remote.rs b/src/tools/cargo/src/cargo/sources/registry/http_remote.rs
index c69ef8f9b..52f6f392e 100644
--- a/src/tools/cargo/src/cargo/sources/registry/http_remote.rs
+++ b/src/tools/cargo/src/cargo/sources/registry/http_remote.rs
@@ -4,16 +4,16 @@ use crate::core::{PackageId, SourceId};
use crate::sources::registry::download;
use crate::sources::registry::MaybeLock;
use crate::sources::registry::{LoadResponse, RegistryConfig, RegistryData};
-use crate::util::errors::{CargoResult, HttpNotSuccessful, DEBUG_HEADERS};
+use crate::util::errors::{CargoResult, HttpNotSuccessful};
use crate::util::network::http::http_handle;
use crate::util::network::retry::{Retry, RetryResult};
use crate::util::network::sleep::SleepTracker;
use crate::util::{auth, Config, Filesystem, IntoUrl, Progress, ProgressStyle};
use anyhow::Context;
+use cargo_credential::Operation;
use cargo_util::paths;
use curl::easy::{Easy, List};
use curl::multi::{EasyHandle, Multi};
-use log::{debug, trace};
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};
use std::fs::{self, File};
@@ -22,6 +22,7 @@ use std::path::{Path, PathBuf};
use std::str;
use std::task::{ready, Poll};
use std::time::Duration;
+use tracing::{debug, trace};
use url::Url;
// HTTP headers
@@ -96,6 +97,9 @@ pub struct HttpRegistry<'cfg> {
/// Url to get a token for the registry.
login_url: Option<Url>,
+ /// Headers received with an HTTP 401.
+ auth_error_headers: Vec<String>,
+
/// Disables status messages.
quiet: bool,
}
@@ -149,8 +153,8 @@ struct Headers {
last_modified: Option<String>,
etag: Option<String>,
www_authenticate: Vec<String>,
- /// We don't care about these headers. Put them here for debugging purpose.
- others: Vec<String>,
+ /// All headers, including explicit headers above.
+ all: Vec<String>,
}
/// HTTP status code [`HttpRegistry`] cares about.
@@ -221,6 +225,7 @@ impl<'cfg> HttpRegistry<'cfg> {
registry_config: None,
auth_required: false,
login_url: None,
+ auth_error_headers: vec![],
quiet: false,
})
}
@@ -316,7 +321,7 @@ impl<'cfg> HttpRegistry<'cfg> {
&mut handle,
&url,
data,
- download.header_map.take().others,
+ download.header_map.take().all,
)
.into());
}
@@ -389,11 +394,11 @@ impl<'cfg> HttpRegistry<'cfg> {
Ok(json) => {
self.registry_config = Some(json);
}
- Err(e) => log::debug!("failed to decode cached config.json: {}", e),
+ Err(e) => tracing::debug!("failed to decode cached config.json: {}", e),
},
Err(e) => {
if e.kind() != ErrorKind::NotFound {
- log::debug!("failed to read config.json cache: {}", e)
+ tracing::debug!("failed to read config.json cache: {}", e)
}
}
}
@@ -418,7 +423,7 @@ impl<'cfg> HttpRegistry<'cfg> {
self.registry_config = Some(serde_json::from_slice(&raw_data)?);
if paths::create_dir_all(&config_json_path.parent().unwrap()).is_ok() {
if let Err(e) = fs::write(&config_json_path, &raw_data) {
- log::debug!("failed to write config.json cache: {}", e);
+ tracing::debug!("failed to write config.json cache: {}", e);
}
}
Poll::Ready(Ok(self.registry_config.as_ref().unwrap()))
@@ -569,6 +574,7 @@ impl<'cfg> RegistryData for HttpRegistry<'cfg> {
}
}
}
+ self.auth_error_headers = result.header_map.all;
}
StatusCode::Unauthorized => {
let err = Err(HttpNotSuccessful {
@@ -576,7 +582,7 @@ impl<'cfg> RegistryData for HttpRegistry<'cfg> {
body: result.data,
url: self.full_url(path),
ip: None,
- headers: result.header_map.others,
+ headers: result.header_map.all,
}
.into());
if self.auth_required {
@@ -639,8 +645,13 @@ impl<'cfg> RegistryData for HttpRegistry<'cfg> {
}
}
if self.auth_required {
- let authorization =
- auth::auth_token(self.config, &self.source_id, self.login_url.as_ref(), None)?;
+ let authorization = auth::auth_token(
+ self.config,
+ &self.source_id,
+ self.login_url.as_ref(),
+ Operation::Read,
+ self.auth_error_headers.clone(),
+ )?;
headers.append(&format!("Authorization: {}", authorization))?;
trace!("including authorization for {}", full_url);
}
@@ -679,15 +690,12 @@ impl<'cfg> RegistryData for HttpRegistry<'cfg> {
tls::with(|downloads| {
if let Some(downloads) = downloads {
let mut header_map = downloads.pending[&token].0.header_map.borrow_mut();
+ header_map.all.push(format!("{tag}: {value}"));
match tag.to_ascii_lowercase().as_str() {
LAST_MODIFIED => header_map.last_modified = Some(value.to_string()),
ETAG => header_map.etag = Some(value.to_string()),
WWW_AUTHENTICATE => header_map.www_authenticate.push(value.to_string()),
- _ => {
- if DEBUG_HEADERS.iter().any(|prefix| tag.starts_with(prefix)) {
- header_map.others.push(format!("{tag}: {value}"));
- }
- }
+ _ => {}
}
}
});
@@ -808,7 +816,9 @@ impl<'cfg> Downloads<'cfg> {
/// Updates the state of the progress bar for downloads.
fn tick(&self) -> CargoResult<()> {
let mut progress = self.progress.borrow_mut();
- let Some(progress) = progress.as_mut() else { return Ok(()); };
+ let Some(progress) = progress.as_mut() else {
+ return Ok(());
+ };
// Since the sparse protocol discovers dependencies as it goes,
// it's not possible to get an accurate progress indication.
diff --git a/src/tools/cargo/src/cargo/sources/registry/index.rs b/src/tools/cargo/src/cargo/sources/registry/index.rs
index 6d565da8f..05bfe71af 100644
--- a/src/tools/cargo/src/cargo/sources/registry/index.rs
+++ b/src/tools/cargo/src/cargo/sources/registry/index.rs
@@ -94,7 +94,6 @@ use crate::util::IntoUrl;
use crate::util::{internal, CargoResult, Config, Filesystem, OptVersionReq, ToSemver};
use anyhow::bail;
use cargo_util::{paths, registry::make_dep_path};
-use log::{debug, info};
use semver::Version;
use serde::Deserialize;
use std::borrow::Cow;
@@ -105,6 +104,7 @@ use std::io::ErrorKind;
use std::path::Path;
use std::str;
use std::task::{ready, Poll};
+use tracing::{debug, info};
/// The current version of [`SummariesCache`].
const CURRENT_CACHE_VERSION: u8 = 3;
@@ -379,7 +379,9 @@ impl<'cfg> RegistryIndex<'cfg> {
pub fn hash(&mut self, pkg: PackageId, load: &mut dyn RegistryData) -> Poll<CargoResult<&str>> {
let req = OptVersionReq::exact(pkg.version());
let summary = self.summaries(&pkg.name(), &req, load)?;
- let summary = ready!(summary).next();
+ let summary = ready!(summary)
+ .filter(|s| s.summary.version() == pkg.version())
+ .next();
Poll::Ready(Ok(summary
.ok_or_else(|| internal(format!("no hash listed for {}", pkg)))?
.summary
@@ -623,10 +625,10 @@ impl<'cfg> RegistryIndex<'cfg> {
load: &mut dyn RegistryData,
) -> Poll<CargoResult<bool>> {
let req = OptVersionReq::exact(pkg.version());
- let found = self
- .summaries(&pkg.name(), &req, load)
- .map_ok(|mut p| p.any(|summary| summary.yanked));
- found
+ let found = ready!(self.summaries(&pkg.name(), &req, load))?
+ .filter(|s| s.summary.version() == pkg.version())
+ .any(|summary| summary.yanked);
+ Poll::Ready(Ok(found))
}
}
@@ -673,23 +675,23 @@ impl Summaries {
index_version = Some(v);
}
Err(e) => {
- log::debug!("failed to parse {:?} cache: {}", relative, e);
+ tracing::debug!("failed to parse {:?} cache: {}", relative, e);
}
},
- Err(e) => log::debug!("cache missing for {:?} error: {}", relative, e),
+ Err(e) => tracing::debug!("cache missing for {:?} error: {}", relative, e),
}
let response = ready!(load.load(root, relative, index_version.as_deref())?);
match response {
LoadResponse::CacheValid => {
- log::debug!("fast path for registry cache of {:?}", relative);
+ tracing::debug!("fast path for registry cache of {:?}", relative);
return Poll::Ready(Ok(cached_summaries));
}
LoadResponse::NotFound => {
if let Err(e) = fs::remove_file(cache_path) {
if e.kind() != ErrorKind::NotFound {
- log::debug!("failed to remove from cache: {}", e);
+ tracing::debug!("failed to remove from cache: {}", e);
}
}
return Poll::Ready(Ok(None));
@@ -701,7 +703,7 @@ impl Summaries {
// This is the fallback path where we actually talk to the registry backend to load
// information. Here we parse every single line in the index (as we need
// to find the versions)
- log::debug!("slow path for {:?}", relative);
+ tracing::debug!("slow path for {:?}", relative);
let mut cache = SummariesCache::default();
let mut ret = Summaries::default();
ret.raw_data = raw_data;
@@ -722,7 +724,11 @@ impl Summaries {
// entries in the cache preventing those newer
// versions from reading them (that is, until the
// cache is rebuilt).
- log::info!("failed to parse {:?} registry package: {}", relative, e);
+ tracing::info!(
+ "failed to parse {:?} registry package: {}",
+ relative,
+ e
+ );
continue;
}
};
@@ -731,7 +737,7 @@ impl Summaries {
ret.versions.insert(version, summary.into());
}
if let Some(index_version) = index_version {
- log::trace!("caching index_version {}", index_version);
+ tracing::trace!("caching index_version {}", index_version);
let cache_bytes = cache.serialize(index_version.as_str());
// Once we have our `cache_bytes` which represents the `Summaries` we're
// about to return, write that back out to disk so future Cargo
@@ -743,7 +749,7 @@ impl Summaries {
let path = Filesystem::new(cache_path.clone());
config.assert_package_cache_locked(&path);
if let Err(e) = fs::write(cache_path, &cache_bytes) {
- log::info!("failed to write cache: {}", e);
+ tracing::info!("failed to write cache: {}", e);
}
}
@@ -906,7 +912,7 @@ impl IndexSummary {
v,
} = serde_json::from_slice(line)?;
let v = v.unwrap_or(1);
- log::trace!("json parsed registry {}/{}", name, vers);
+ tracing::trace!("json parsed registry {}/{}", name, vers);
let pkgid = PackageId::new(name, &vers, source_id)?;
let deps = deps
.into_iter()
diff --git a/src/tools/cargo/src/cargo/sources/registry/mod.rs b/src/tools/cargo/src/cargo/sources/registry/mod.rs
index a0178db55..c1ff8b12e 100644
--- a/src/tools/cargo/src/cargo/sources/registry/mod.rs
+++ b/src/tools/cargo/src/cargo/sources/registry/mod.rs
@@ -195,10 +195,10 @@ use std::task::{ready, Poll};
use anyhow::Context as _;
use cargo_util::paths::{self, exclude_from_backups_and_indexing};
use flate2::read::GzDecoder;
-use log::debug;
use serde::Deserialize;
use serde::Serialize;
use tar::Archive;
+use tracing::debug;
use crate::core::dependency::Dependency;
use crate::core::source::MaybePackage;
@@ -589,9 +589,9 @@ impl<'cfg> RegistrySource<'cfg> {
}
_ => {
if ok == "ok" {
- log::debug!("old `ok` content found, clearing cache");
+ tracing::debug!("old `ok` content found, clearing cache");
} else {
- log::warn!("unrecognized .cargo-ok content, clearing cache: {ok}");
+ tracing::warn!("unrecognized .cargo-ok content, clearing cache: {ok}");
}
// See comment of `unpack_package` about why removing all stuff.
paths::remove_dir_all(dst.as_path_unlocked())?;
@@ -694,6 +694,7 @@ impl<'cfg> RegistrySource<'cfg> {
.summaries(&package.name(), &req, &mut *self.ops)?
.expect("a downloaded dep now pending!?")
.map(|s| s.summary.clone())
+ .filter(|s| s.version() == package.version())
.next()
.expect("summary not found");
if let Some(cksum) = summary_with_cksum.checksum() {
@@ -887,7 +888,7 @@ impl<'cfg> Source for RegistrySource<'cfg> {
impl RegistryConfig {
/// File name of [`RegistryConfig`].
- const NAME: &str = "config.json";
+ const NAME: &'static str = "config.json";
}
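
The only change to `RegistryConfig::NAME` is spelling out the `'static` lifetime, presumably to stay ahead of compiler warnings about elided lifetimes in associated constants. Stand-alone, the declaration looks like this:

    struct RegistryConfig;

    impl RegistryConfig {
        // The `'static` lifetime is written out explicitly instead of being elided.
        const NAME: &'static str = "config.json";
    }

    fn main() {
        assert_eq!(RegistryConfig::NAME, "config.json");
    }
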
/// Get the maximum unpack size that Cargo permits
diff --git a/src/tools/cargo/src/cargo/sources/registry/remote.rs b/src/tools/cargo/src/cargo/sources/registry/remote.rs
index 4223b0303..89927181f 100644
--- a/src/tools/cargo/src/cargo/sources/registry/remote.rs
+++ b/src/tools/cargo/src/cargo/sources/registry/remote.rs
@@ -12,13 +12,13 @@ use crate::util::{Config, Filesystem};
use anyhow::Context as _;
use cargo_util::paths;
use lazycell::LazyCell;
-use log::{debug, trace};
use std::cell::{Cell, Ref, RefCell};
use std::fs::File;
use std::mem;
use std::path::Path;
use std::str;
use std::task::{ready, Poll};
+use tracing::{debug, trace};
/// A remote registry is a registry that lives at a remote URL (such as
/// crates.io). The git index is cloned locally, and `.crate` files are
diff --git a/src/tools/cargo/src/cargo/util/auth/asymmetric.rs b/src/tools/cargo/src/cargo/util/auth/asymmetric.rs
deleted file mode 100644
index 50882a745..000000000
--- a/src/tools/cargo/src/cargo/util/auth/asymmetric.rs
+++ /dev/null
@@ -1,155 +0,0 @@
-//! Registry asymmetric authentication support. See [RFC 3231] for more.
-//!
-//! [RFC 3231]: https://rust-lang.github.io/rfcs/3231-cargo-asymmetric-tokens.html
-
-use pasetors::keys::AsymmetricPublicKey;
-use pasetors::keys::AsymmetricSecretKey;
-use pasetors::paserk;
-use pasetors::paserk::FormatAsPaserk;
-use pasetors::version3;
-use pasetors::version3::PublicToken;
-use time::format_description::well_known::Rfc3339;
-use time::OffsetDateTime;
-
-use crate::core::SourceId;
-use crate::ops::RegistryCredentialConfig;
-use crate::CargoResult;
-
-use super::Mutation;
-use super::Secret;
-
-/// The main body of an asymmetric token as describe in RFC 3231.
-#[derive(serde::Serialize)]
-struct Message<'a> {
- iat: &'a str,
- #[serde(skip_serializing_if = "Option::is_none")]
- sub: Option<&'a str>,
- #[serde(skip_serializing_if = "Option::is_none")]
- mutation: Option<&'a str>,
- #[serde(skip_serializing_if = "Option::is_none")]
- name: Option<&'a str>,
- #[serde(skip_serializing_if = "Option::is_none")]
- vers: Option<&'a str>,
- #[serde(skip_serializing_if = "Option::is_none")]
- cksum: Option<&'a str>,
- #[serde(skip_serializing_if = "Option::is_none")]
- challenge: Option<&'a str>,
- /// This field is not yet used. This field can be set to a value >1 to
- /// indicate a breaking change in the token format.
- #[serde(skip_serializing_if = "Option::is_none")]
- v: Option<u8>,
-}
-
-/// The footer of an asymmetric token as describe in RFC 3231.
-#[derive(serde::Serialize)]
-struct Footer<'a> {
- url: &'a str,
- kip: paserk::Id,
-}
-
-/// Checks that a secret key is valid, and returns the associated public key in
-/// Paserk format.
-pub fn paserk_public_from_paserk_secret(secret_key: Secret<&str>) -> Option<String> {
- let secret: Secret<AsymmetricSecretKey<version3::V3>> =
- secret_key.map(|key| key.try_into()).transpose().ok()?;
- let public: AsymmetricPublicKey<version3::V3> = secret
- .as_ref()
- .map(|key| key.try_into())
- .transpose()
- .ok()?
- .expose();
- let mut paserk_pub_key = String::new();
- FormatAsPaserk::fmt(&public, &mut paserk_pub_key).unwrap();
- Some(paserk_pub_key)
-}
-
-/// Generates a public token from a registry's `credential` configuration for
-/// authenticating to a `source_id`
-///
-/// An optional `mutation` for authenticating a mutation operation aganist the
-/// registry.
-pub fn public_token_from_credential(
- credential: RegistryCredentialConfig,
- source_id: &SourceId,
- mutation: Option<&'_ Mutation<'_>>,
-) -> CargoResult<Secret<String>> {
- let RegistryCredentialConfig::AsymmetricKey((secret_key, secret_key_subject)) = credential else {
- anyhow::bail!("credential must be an asymmetric secret key")
- };
-
- let secret: Secret<AsymmetricSecretKey<version3::V3>> =
- secret_key.map(|key| key.as_str().try_into()).transpose()?;
- let public: AsymmetricPublicKey<version3::V3> = secret
- .as_ref()
- .map(|key| key.try_into())
- .transpose()?
- .expose();
- let kip = (&public).try_into()?;
- let iat = OffsetDateTime::now_utc();
-
- let message = Message {
- iat: &iat.format(&Rfc3339)?,
- sub: secret_key_subject.as_deref(),
- mutation: mutation.and_then(|m| {
- Some(match m {
- Mutation::PrePublish => return None,
- Mutation::Publish { .. } => "publish",
- Mutation::Yank { .. } => "yank",
- Mutation::Unyank { .. } => "unyank",
- Mutation::Owners { .. } => "owners",
- })
- }),
- name: mutation.and_then(|m| {
- Some(match m {
- Mutation::PrePublish => return None,
- Mutation::Publish { name, .. }
- | Mutation::Yank { name, .. }
- | Mutation::Unyank { name, .. }
- | Mutation::Owners { name, .. } => *name,
- })
- }),
- vers: mutation.and_then(|m| {
- Some(match m {
- Mutation::PrePublish | Mutation::Owners { .. } => return None,
- Mutation::Publish { vers, .. }
- | Mutation::Yank { vers, .. }
- | Mutation::Unyank { vers, .. } => *vers,
- })
- }),
- cksum: mutation.and_then(|m| {
- Some(match m {
- Mutation::PrePublish
- | Mutation::Yank { .. }
- | Mutation::Unyank { .. }
- | Mutation::Owners { .. } => return None,
- Mutation::Publish { cksum, .. } => *cksum,
- })
- }),
- challenge: None, // todo: PASETO with challenges
- v: None,
- };
-
- let footer = Footer {
- url: &source_id.url().to_string(),
- kip,
- };
-
- let secret = secret
- .map(|secret| {
- PublicToken::sign(
- &secret,
- serde_json::to_string(&message)
- .expect("cannot serialize")
- .as_bytes(),
- Some(
- serde_json::to_string(&footer)
- .expect("cannot serialize")
- .as_bytes(),
- ),
- None,
- )
- })
- .transpose()?;
-
- Ok(secret)
-}
diff --git a/src/tools/cargo/src/cargo/util/auth/mod.rs b/src/tools/cargo/src/cargo/util/auth/mod.rs
index 58309964f..60a356fa0 100644
--- a/src/tools/cargo/src/cargo/util/auth/mod.rs
+++ b/src/tools/cargo/src/cargo/util/auth/mod.rs
@@ -1,143 +1,227 @@
//! Registry authentication support.
-mod asymmetric;
+use crate::{
+ sources::CRATES_IO_REGISTRY,
+ util::{config::ConfigKey, CanonicalUrl, CargoResult, Config, IntoUrl},
+};
+use anyhow::{bail, Context as _};
+use cargo_credential::{
+ Action, CacheControl, Credential, CredentialResponse, LoginOptions, Operation, RegistryInfo,
+ Secret,
+};
-use crate::util::{config, config::ConfigKey, CanonicalUrl, CargoResult, Config, IntoUrl};
-use anyhow::{bail, format_err, Context as _};
-use cargo_util::ProcessError;
use core::fmt;
use serde::Deserialize;
-use std::collections::HashMap;
use std::error::Error;
-use std::io::{Read, Write};
-use std::ops::Deref;
-use std::path::PathBuf;
-use std::process::{Command, Stdio};
+use time::{Duration, OffsetDateTime};
use url::Url;
use crate::core::SourceId;
-use crate::ops::RegistryCredentialConfig;
+use crate::util::config::Value;
+use crate::util::credential::adaptor::BasicProcessCredential;
+use crate::util::credential::paseto::PasetoCredential;
-pub use self::asymmetric::paserk_public_from_paserk_secret;
+use super::{
+ config::{CredentialCacheValue, OptValue, PathAndArgs},
+ credential::process::CredentialProcessCredential,
+ credential::token::TokenCredential,
+};
-use super::config::CredentialCacheValue;
-
-/// A wrapper for values that should not be printed.
-///
-/// This type does not implement `Display`, and has a `Debug` impl that hides
-/// the contained value.
-///
-/// ```
-/// # use cargo::util::auth::Secret;
-/// let token = Secret::from("super secret string");
-/// assert_eq!(format!("{:?}", token), "Secret { inner: \"REDACTED\" }");
-/// ```
+/// `[registries.NAME]` tables.
///
-/// Currently, we write a borrowed `Secret<T>` as `Secret<&T>`.
-/// The [`as_deref`](Secret::as_deref) and [`owned`](Secret::owned) methods can
-/// be used to convert back and forth between `Secret<String>` and `Secret<&str>`.
-#[derive(Default, Clone, PartialEq, Eq)]
-pub struct Secret<T> {
- inner: T,
+/// The values here should be kept in sync with `RegistryConfigExtended`
+#[derive(Deserialize, Clone, Debug)]
+#[serde(rename_all = "kebab-case")]
+pub struct RegistryConfig {
+ pub index: Option<String>,
+ pub token: OptValue<Secret<String>>,
+ pub credential_provider: Option<PathAndArgs>,
+ pub secret_key: OptValue<Secret<String>>,
+ pub secret_key_subject: Option<String>,
+ #[serde(rename = "protocol")]
+ _protocol: Option<String>,
}
-impl<T> Secret<T> {
- /// Unwraps the contained value.
- ///
- /// Use of this method marks the boundary of where the contained value is
- /// hidden.
- pub fn expose(self) -> T {
- self.inner
- }
-
- /// Converts a `Secret<T>` to a `Secret<&T::Target>`.
- /// ```
- /// # use cargo::util::auth::Secret;
- /// let owned: Secret<String> = Secret::from(String::from("token"));
- /// let borrowed: Secret<&str> = owned.as_deref();
- /// ```
- pub fn as_deref(&self) -> Secret<&<T as Deref>::Target>
- where
- T: Deref,
- {
- Secret::from(self.inner.deref())
- }
-
- /// Converts a `Secret<T>` to a `Secret<&T>`.
- pub fn as_ref(&self) -> Secret<&T> {
- Secret::from(&self.inner)
- }
-
- /// Converts a `Secret<T>` to a `Secret<U>` by applying `f` to the contained value.
- pub fn map<U, F>(self, f: F) -> Secret<U>
- where
- F: FnOnce(T) -> U,
- {
- Secret::from(f(self.inner))
- }
+/// The `[registry]` table, which has more keys than the `[registries.NAME]` tables.
+///
+/// Note: nesting `RegistryConfig` inside this struct and using `serde(flatten)` *should* work
+/// but fails with "invalid type: sequence, expected a value" when attempting to deserialize.
+#[derive(Deserialize)]
+#[serde(rename_all = "kebab-case")]
+pub struct RegistryConfigExtended {
+ pub index: Option<String>,
+ pub token: OptValue<Secret<String>>,
+ pub credential_provider: Option<PathAndArgs>,
+ pub secret_key: OptValue<Secret<String>>,
+ pub secret_key_subject: Option<String>,
+ #[serde(rename = "default")]
+ _default: Option<String>,
+ #[serde(rename = "global-credential-providers")]
+ _global_credential_providers: Option<Vec<String>>,
}
-impl<T: ToOwned + ?Sized> Secret<&T> {
- /// Converts a `Secret` containing a borrowed type to a `Secret` containing the
- /// corresponding owned type.
- /// ```
- /// # use cargo::util::auth::Secret;
- /// let borrowed: Secret<&str> = Secret::from("token");
- /// let owned: Secret<String> = borrowed.owned();
- /// ```
- pub fn owned(&self) -> Secret<<T as ToOwned>::Owned> {
- Secret::from(self.inner.to_owned())
+impl RegistryConfigExtended {
+ pub fn to_registry_config(self) -> RegistryConfig {
+ RegistryConfig {
+ index: self.index,
+ token: self.token,
+ credential_provider: self.credential_provider,
+ secret_key: self.secret_key,
+ secret_key_subject: self.secret_key_subject,
+ _protocol: None,
+ }
}
}
-impl<T, E> Secret<Result<T, E>> {
- /// Converts a `Secret<Result<T, E>>` to a `Result<Secret<T>, E>`.
- pub fn transpose(self) -> Result<Secret<T>, E> {
- self.inner.map(|v| Secret::from(v))
- }
-}
+/// Get the list of credential providers for a registry source.
+fn credential_provider(config: &Config, sid: &SourceId) -> CargoResult<Vec<Vec<String>>> {
+ let cfg = registry_credential_config_raw(config, sid)?;
+ let allow_cred_proc = config.cli_unstable().credential_process;
+ let default_providers = || {
+ if allow_cred_proc {
+ // Enable the PASETO provider
+ vec![
+ vec!["cargo:token".to_string()],
+ vec!["cargo:paseto".to_string()],
+ ]
+ } else {
+ vec![vec!["cargo:token".to_string()]]
+ }
+ };
+ let global_providers = config
+ .get::<Option<Vec<Value<String>>>>("registry.global-credential-providers")?
+ .filter(|p| !p.is_empty() && allow_cred_proc)
+ .map(|p| {
+ p.iter()
+ .rev()
+ .map(PathAndArgs::from_whitespace_separated_string)
+ .map(|p| resolve_credential_alias(config, p))
+ .collect()
+ })
+ .unwrap_or_else(default_providers);
+ tracing::debug!(?global_providers);
-impl<T: AsRef<str>> Secret<T> {
- /// Checks if the contained value is empty.
- pub fn is_empty(&self) -> bool {
- self.inner.as_ref().is_empty()
- }
-}
+ let providers = match cfg {
+ // If there's a specific provider configured for this registry, use it.
+ Some(RegistryConfig {
+ credential_provider: Some(provider),
+ token,
+ secret_key,
+ ..
+ }) if allow_cred_proc => {
+ if let Some(token) = token {
+ config.shell().warn(format!(
+ "{sid} has a token configured in {} that will be ignored \
+                    because a credential-provider is configured for this registry",
+ token.definition
+ ))?;
+ }
+ if let Some(secret_key) = secret_key {
+ config.shell().warn(format!(
+ "{sid} has a secret-key configured in {} that will be ignored \
+                    because a credential-provider is configured for this registry",
+ secret_key.definition
+ ))?;
+ }
+ vec![resolve_credential_alias(config, provider)]
+ }
-impl<T> From<T> for Secret<T> {
- fn from(inner: T) -> Self {
- Self { inner }
- }
-}
+ // Warning for both `token` and `secret-key`, stating which will be ignored
+ Some(RegistryConfig {
+ token: Some(token),
+ secret_key: Some(secret_key),
+ ..
+ }) if allow_cred_proc => {
+ let token_pos = global_providers
+ .iter()
+ .position(|p| p.first().map(String::as_str) == Some("cargo:token"));
+ let paseto_pos = global_providers
+ .iter()
+ .position(|p| p.first().map(String::as_str) == Some("cargo:paseto"));
+ match (token_pos, paseto_pos) {
+ (Some(token_pos), Some(paseto_pos)) => {
+ if token_pos < paseto_pos {
+ config.shell().warn(format!(
+ "{sid} has a `secret_key` configured in {} that will be ignored \
+ because a `token` is also configured, and the `cargo:token` provider is \
+ configured with higher precedence",
+ secret_key.definition
+ ))?;
+ } else {
+ config.shell().warn(format!("{sid} has a `token` configured in {} that will be ignored \
+ because a `secret_key` is also configured, and the `cargo:paseto` provider is \
+ configured with higher precedence", token.definition))?;
+ }
+ }
+ (_, _) => {
+ // One or both of the below individual warnings will trigger
+ }
+ }
+ global_providers
+ }
-impl<T> fmt::Debug for Secret<T> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("Secret")
- .field("inner", &"REDACTED")
- .finish()
- }
+ // Check if a `token` is configured that will be ignored.
+ Some(RegistryConfig {
+ token: Some(token), ..
+ }) => {
+ if !global_providers
+ .iter()
+ .any(|p| p.first().map(String::as_str) == Some("cargo:token"))
+ {
+ config.shell().warn(format!(
+ "{sid} has a token configured in {} that will be ignored \
+ because the `cargo:token` credential provider is not listed in \
+ `registry.global-credential-providers`",
+ token.definition
+ ))?;
+ }
+ global_providers
+ }
+
+            // Check if an asymmetric token is configured that will be ignored.
+ Some(RegistryConfig {
+ secret_key: Some(token),
+ ..
+ }) if allow_cred_proc => {
+ if !global_providers
+ .iter()
+ .any(|p| p.first().map(String::as_str) == Some("cargo:paseto"))
+ {
+ config.shell().warn(format!(
+ "{sid} has a secret-key configured in {} that will be ignored \
+ because the `cargo:paseto` credential provider is not listed in \
+ `registry.global-credential-providers`",
+ token.definition
+ ))?;
+ }
+ global_providers
+ }
+
+ // If we couldn't find a registry-specific provider, use the fallback provider list.
+ None | Some(RegistryConfig { .. }) => global_providers,
+ };
+ Ok(providers)
}
/// Get the credential configuration for a `SourceId`.
-pub fn registry_credential_config(
+pub fn registry_credential_config_raw(
config: &Config,
sid: &SourceId,
-) -> CargoResult<RegistryCredentialConfig> {
- #[derive(Deserialize)]
- #[serde(rename_all = "kebab-case")]
- struct RegistryConfig {
- index: Option<String>,
- token: Option<String>,
- credential_process: Option<config::PathAndArgs>,
- secret_key: Option<String>,
- secret_key_subject: Option<String>,
- #[serde(rename = "default")]
- _default: Option<String>,
- #[serde(rename = "protocol")]
- _protocol: Option<String>,
+) -> CargoResult<Option<RegistryConfig>> {
+ let mut cache = config.registry_config();
+ if let Some(cfg) = cache.get(&sid) {
+ return Ok(cfg.clone());
}
+ let cfg = registry_credential_config_raw_uncached(config, sid)?;
+ cache.insert(*sid, cfg.clone());
+ return Ok(cfg);
+}
- log::trace!("loading credential config for {}", sid);
+fn registry_credential_config_raw_uncached(
+ config: &Config,
+ sid: &SourceId,
+) -> CargoResult<Option<RegistryConfig>> {
+ tracing::trace!("loading credential config for {}", sid);
config.load_credentials()?;
if !sid.is_remote_registry() {
bail!(
@@ -150,22 +234,9 @@ pub fn registry_credential_config(
// Handle crates.io specially, since it uses different configuration keys.
if sid.is_crates_io() {
config.check_registry_index_not_set()?;
- let RegistryConfig {
- token,
- credential_process,
- secret_key,
- secret_key_subject,
- ..
- } = config.get::<RegistryConfig>("registry")?;
- return registry_credential_config_inner(
- true,
- None,
- token.map(Secret::from),
- credential_process,
- secret_key.map(Secret::from),
- secret_key_subject,
- config,
- );
+ return Ok(config
+ .get::<Option<RegistryConfigExtended>>("registry")?
+ .map(|c| c.to_registry_config()));
}
// Find the SourceId's name by its index URL. If environment variables
@@ -179,6 +250,7 @@ pub fn registry_credential_config(
// This also allows the authorization token for a registry to be set
// without knowing the registry name by using the _INDEX and _TOKEN
// environment variables.
+
let name = {
// Discover names from environment variables.
let index = sid.canonical_url();
@@ -198,14 +270,17 @@ pub fn registry_credential_config(
// Discover names from the configuration only if none were found in the environment.
if names.len() == 0 {
- names = config
- .get::<HashMap<String, RegistryConfig>>("registries")?
- .iter()
- .filter_map(|(k, v)| Some((k, v.index.as_deref()?)))
- .filter_map(|(k, v)| Some((k, CanonicalUrl::new(&v.into_url().ok()?).ok()?)))
- .filter(|(_, v)| v == index)
- .map(|(k, _)| k.to_string())
- .collect();
+ if let Some(registries) = config.values()?.get("registries") {
+ let (registries, _) = registries.table("registries")?;
+ for (name, value) in registries {
+ if let Some(v) = value.table(&format!("registries.{name}"))?.0.get("index") {
+ let (v, _) = v.string(&format!("registries.{name}.index"))?;
+ if index == &CanonicalUrl::new(&v.into_url()?)? {
+ names.push(name.clone());
+ }
+ }
+ }
+ }
}
names.sort();
match names.len() {
@@ -232,99 +307,34 @@ pub fn registry_credential_config(
}
}
- let (token, credential_process, secret_key, secret_key_subject) = if let Some(name) = &name {
- log::debug!("found alternative registry name `{name}` for {sid}");
- let RegistryConfig {
- token,
- secret_key,
- secret_key_subject,
- credential_process,
- ..
- } = config.get::<RegistryConfig>(&format!("registries.{name}"))?;
- (token, credential_process, secret_key, secret_key_subject)
+ if let Some(name) = &name {
+ tracing::debug!("found alternative registry name `{name}` for {sid}");
+ config.get::<Option<RegistryConfig>>(&format!("registries.{name}"))
} else {
- log::debug!("no registry name found for {sid}");
- (None, None, None, None)
- };
-
- registry_credential_config_inner(
- false,
- name.as_deref(),
- token.map(Secret::from),
- credential_process,
- secret_key.map(Secret::from),
- secret_key_subject,
- config,
- )
+ tracing::debug!("no registry name found for {sid}");
+ Ok(None)
+ }
}
-fn registry_credential_config_inner(
- is_crates_io: bool,
- name: Option<&str>,
- token: Option<Secret<String>>,
- credential_process: Option<config::PathAndArgs>,
- secret_key: Option<Secret<String>>,
- secret_key_subject: Option<String>,
- config: &Config,
-) -> CargoResult<RegistryCredentialConfig> {
- let credential_process =
- credential_process.filter(|_| config.cli_unstable().credential_process);
- let secret_key = secret_key.filter(|_| config.cli_unstable().registry_auth);
- let secret_key_subject = secret_key_subject.filter(|_| config.cli_unstable().registry_auth);
- let err_both = |token_key: &str, proc_key: &str| {
- let registry = if is_crates_io {
- "".to_string()
- } else {
- format!(" for registry `{}`", name.unwrap_or("UN-NAMED"))
- };
- Err(format_err!(
- "both `{token_key}` and `{proc_key}` \
- were specified in the config{registry}.\n\
- Only one of these values may be set, remove one or the other to proceed.",
- ))
- };
- Ok(
- match (token, credential_process, secret_key, secret_key_subject) {
- (Some(_), Some(_), _, _) => return err_both("token", "credential-process"),
- (Some(_), _, Some(_), _) => return err_both("token", "secret-key"),
- (_, Some(_), Some(_), _) => return err_both("credential-process", "secret-key"),
- (_, _, None, Some(_)) => {
- let registry = if is_crates_io {
- "".to_string()
- } else {
- format!(" for registry `{}`", name.as_ref().unwrap())
- };
- return Err(format_err!(
- "`secret-key-subject` was set but `secret-key` was not in the config{}.\n\
- Either set the `secret-key` or remove the `secret-key-subject`.",
- registry
- ));
- }
- (Some(token), _, _, _) => RegistryCredentialConfig::Token(token),
- (_, Some(process), _, _) => RegistryCredentialConfig::Process((
- process.path.resolve_program(config),
- process.args,
- )),
- (None, None, Some(key), subject) => {
- RegistryCredentialConfig::AsymmetricKey((key, subject))
- }
- (None, None, None, _) => {
- if !is_crates_io {
- // If we couldn't find a registry-specific credential, try the global credential process.
- if let Some(process) = config
- .get::<Option<config::PathAndArgs>>("registry.credential-process")?
- .filter(|_| config.cli_unstable().credential_process)
- {
- return Ok(RegistryCredentialConfig::Process((
- process.path.resolve_program(config),
- process.args,
- )));
- }
- }
- RegistryCredentialConfig::None
- }
- },
- )
+/// Use the `[credential-alias]` table to see if the provider name has been aliased.
+fn resolve_credential_alias(config: &Config, mut provider: PathAndArgs) -> Vec<String> {
+ if provider.args.is_empty() {
+ let key = format!("credential-alias.{}", provider.path.raw_value());
+ if let Ok(alias) = config.get::<PathAndArgs>(&key) {
+ tracing::debug!("resolving credential alias '{key}' -> '{alias:?}'");
+ provider = alias;
+ }
+ }
+ provider.args.insert(
+ 0,
+ provider
+ .path
+ .resolve_program(config)
+ .to_str()
+ .unwrap()
+ .to_string(),
+ );
+ provider.args
}
#[derive(Debug, PartialEq)]
@@ -403,18 +413,85 @@ my-registry = {{ index = "{}" }}
}
// Store a token in the cache for future calls.
-pub fn cache_token(config: &Config, sid: &SourceId, token: Secret<&str>) {
+pub fn cache_token_from_commandline(config: &Config, sid: &SourceId, token: Secret<&str>) {
let url = sid.canonical_url();
config.credential_cache().insert(
url.clone(),
CredentialCacheValue {
- from_commandline: true,
- independent_of_endpoint: true,
- token_value: token.owned(),
+ token_value: token.to_owned(),
+ expiration: None,
+ operation_independent: true,
},
);
}
+fn credential_action(
+ config: &Config,
+ sid: &SourceId,
+ action: Action<'_>,
+ headers: Vec<String>,
+ args: &[&str],
+) -> CargoResult<CredentialResponse> {
+ let name = if sid.is_crates_io() {
+ Some(CRATES_IO_REGISTRY)
+ } else {
+ sid.alt_registry_key()
+ };
+ let registry = RegistryInfo {
+ index_url: sid.url().as_str(),
+ name,
+ headers,
+ };
+ let providers = credential_provider(config, sid)?;
+ let mut any_not_found = false;
+ for provider in providers {
+ let args: Vec<&str> = provider
+ .iter()
+ .map(String::as_str)
+ .chain(args.iter().map(|s| *s))
+ .collect();
+ let process = args[0];
+ tracing::debug!("attempting credential provider: {args:?}");
+ let provider: Box<dyn Credential> = match process {
+ "cargo:token" => Box::new(TokenCredential::new(config)),
+ "cargo:paseto" => Box::new(PasetoCredential::new(config)),
+ "cargo:token-from-stdout" => Box::new(BasicProcessCredential {}),
+ "cargo:wincred" => Box::new(cargo_credential_wincred::WindowsCredential {}),
+ "cargo:macos-keychain" => Box::new(cargo_credential_macos_keychain::MacKeychain {}),
+ "cargo:libsecret" => Box::new(cargo_credential_libsecret::LibSecretCredential {}),
+ process => Box::new(CredentialProcessCredential::new(process)),
+ };
+ config.shell().verbose(|c| {
+ c.status(
+ "Credential",
+ format!(
+ "{} {action} {}",
+ args.join(" "),
+ sid.display_registry_name()
+ ),
+ )
+ })?;
+ match provider.perform(&registry, &action, &args[1..]) {
+ Ok(response) => return Ok(response),
+ Err(cargo_credential::Error::UrlNotSupported) => {}
+ Err(cargo_credential::Error::NotFound) => any_not_found = true,
+ e => {
+ return e.with_context(|| {
+ format!(
+ "credential provider `{}` failed action `{action}`",
+ args.join(" ")
+ )
+ })
+ }
+ }
+ }
+ if any_not_found {
+ Err(cargo_credential::Error::NotFound.into())
+ } else {
+ anyhow::bail!("no credential providers could handle the request")
+ }
+}
+
/// Returns the token to use for the given registry.
/// If a `login_url` is provided and a token is not available, the
/// login_url will be included in the returned error.
@@ -422,9 +499,10 @@ pub fn auth_token(
config: &Config,
sid: &SourceId,
login_url: Option<&Url>,
- mutation: Option<Mutation<'_>>,
+ operation: Operation<'_>,
+ headers: Vec<String>,
) -> CargoResult<String> {
- match auth_token_optional(config, sid, mutation.as_ref())? {
+ match auth_token_optional(config, sid, operation, headers)? {
Some(token) => Ok(token.expose()),
None => Err(AuthorizationError {
sid: sid.clone(),
@@ -440,285 +518,99 @@ pub fn auth_token(
fn auth_token_optional(
config: &Config,
sid: &SourceId,
- mutation: Option<&'_ Mutation<'_>>,
+ operation: Operation<'_>,
+ headers: Vec<String>,
) -> CargoResult<Option<Secret<String>>> {
+ tracing::trace!("token requested for {}", sid.display_registry_name());
let mut cache = config.credential_cache();
let url = sid.canonical_url();
-
- if let Some(cache_token_value) = cache.get(url) {
- // Tokens for endpoints that do not involve a mutation can always be reused.
- // If the value is put in the cache by the command line, then we reuse it without looking at the configuration.
- if cache_token_value.from_commandline
- || cache_token_value.independent_of_endpoint
- || mutation.is_none()
+ if let Some(cached_token) = cache.get(url) {
+ if cached_token
+ .expiration
+ .map(|exp| OffsetDateTime::now_utc() + Duration::minutes(1) < exp)
+ .unwrap_or(true)
{
- return Ok(Some(cache_token_value.token_value.clone()));
+ if cached_token.operation_independent || matches!(operation, Operation::Read) {
+ tracing::trace!("using token from in-memory cache");
+ return Ok(Some(cached_token.token_value.clone()));
+ }
+ } else {
+ // Remove expired token from the cache
+ cache.remove(url);
}
}
- let credential = registry_credential_config(config, sid)?;
- let (independent_of_endpoint, token) = match credential {
- RegistryCredentialConfig::None => return Ok(None),
- RegistryCredentialConfig::Token(config_token) => (true, config_token),
- RegistryCredentialConfig::Process(process) => {
- // todo: PASETO with process
- let (independent_of_endpoint, token) =
- run_command(config, &process, sid, Action::Get)?.unwrap();
- (independent_of_endpoint, Secret::from(token))
- }
- cred @ RegistryCredentialConfig::AsymmetricKey(..) => {
- let token = asymmetric::public_token_from_credential(cred, sid, mutation)?;
- (false, token)
+ let credential_response = credential_action(config, sid, Action::Get(operation), headers, &[]);
+ if let Some(e) = credential_response.as_ref().err() {
+ if let Some(e) = e.downcast_ref::<cargo_credential::Error>() {
+ if matches!(e, cargo_credential::Error::NotFound) {
+ return Ok(None);
+ }
}
+ }
+ let credential_response = credential_response?;
+
+ let CredentialResponse::Get {
+ token,
+ cache: cache_control,
+ operation_independent,
+ } = credential_response
+ else {
+ bail!("credential provider produced unexpected response for `get` request: {credential_response:?}")
+ };
+ let token = Secret::from(token);
+ tracing::trace!("found token");
+ let expiration = match cache_control {
+ CacheControl::Expires(expiration) => Some(expiration),
+ CacheControl::Session => None,
+ CacheControl::Never | _ => return Ok(Some(token)),
};
- if independent_of_endpoint || mutation.is_none() {
- cache.insert(
- url.clone(),
- CredentialCacheValue {
- from_commandline: false,
- independent_of_endpoint,
- token_value: token.clone(),
- },
- );
- }
+ cache.insert(
+ url.clone(),
+ CredentialCacheValue {
+ token_value: token.clone(),
+ expiration,
+ operation_independent,
+ },
+ );
Ok(Some(token))
}
-/// A record of what kind of operation is happening that we should generate a token for.
-pub enum Mutation<'a> {
- /// Before we generate a crate file for the users attempt to publish,
- /// we need to check if we are configured correctly to generate a token.
- /// This variant is used to make sure that we can generate a token,
- /// to error out early if the token is not configured correctly.
- PrePublish,
- /// The user is attempting to publish a crate.
- Publish {
- /// The name of the crate
- name: &'a str,
- /// The version of the crate
- vers: &'a str,
- /// The checksum of the crate file being uploaded
- cksum: &'a str,
- },
- /// The user is attempting to yank a crate.
- Yank {
- /// The name of the crate
- name: &'a str,
- /// The version of the crate
- vers: &'a str,
- },
- /// The user is attempting to unyank a crate.
- Unyank {
- /// The name of the crate
- name: &'a str,
- /// The version of the crate
- vers: &'a str,
- },
- /// The user is attempting to modify the owners of a crate.
- Owners {
- /// The name of the crate
- name: &'a str,
- },
-}
-
-enum Action {
- Get,
- Store(String),
- Erase,
-}
-
-/// Saves the given token.
-pub fn login(config: &Config, sid: &SourceId, token: RegistryCredentialConfig) -> CargoResult<()> {
- match registry_credential_config(config, sid)? {
- RegistryCredentialConfig::Process(process) => {
- let token = token
- .as_token()
- .expect("credential_process cannot use login with a secret_key")
- .expose()
- .to_owned();
- run_command(config, &process, sid, Action::Store(token))?;
- }
- _ => {
- config::save_credentials(config, Some(token), &sid)?;
- }
- };
- Ok(())
-}
-
-/// Removes the token for the given registry.
+/// Log out from the given registry.
pub fn logout(config: &Config, sid: &SourceId) -> CargoResult<()> {
- match registry_credential_config(config, sid)? {
- RegistryCredentialConfig::Process(process) => {
- run_command(config, &process, sid, Action::Erase)?;
- }
- _ => {
- config::save_credentials(config, None, &sid)?;
+ let credential_response = credential_action(config, sid, Action::Logout, vec![], &[]);
+ if let Some(e) = credential_response.as_ref().err() {
+ if let Some(e) = e.downcast_ref::<cargo_credential::Error>() {
+ if matches!(e, cargo_credential::Error::NotFound) {
+ config.shell().status(
+ "Logout",
+ format!(
+ "not currently logged in to `{}`",
+ sid.display_registry_name()
+ ),
+ )?;
+ return Ok(());
+ }
}
+ }
+ let credential_response = credential_response?;
+ let CredentialResponse::Logout = credential_response else {
+ bail!("credential provider produced unexpected response for `logout` request: {credential_response:?}")
};
Ok(())
}
-fn run_command(
+/// Log in to the given registry.
+pub fn login(
config: &Config,
- process: &(PathBuf, Vec<String>),
sid: &SourceId,
- action: Action,
-) -> CargoResult<Option<(bool, String)>> {
- let index_url = sid.url().as_str();
- let cred_proc;
- let (exe, args) = if process.0.to_str().unwrap_or("").starts_with("cargo:") {
- cred_proc = sysroot_credential(config, process)?;
- &cred_proc
- } else {
- process
+ options: LoginOptions<'_>,
+ args: &[&str],
+) -> CargoResult<()> {
+ let credential_response = credential_action(config, sid, Action::Login(options), vec![], args)?;
+ let CredentialResponse::Login = credential_response else {
+ bail!("credential provider produced unexpected response for `login` request: {credential_response:?}")
};
- if !args.iter().any(|arg| arg.contains("{action}")) {
- let msg = |which| {
- format!(
- "credential process `{}` cannot be used to {}, \
- the credential-process configuration value must pass the \
- `{{action}}` argument in the config to support this command",
- exe.display(),
- which
- )
- };
- match action {
- Action::Get => {}
- Action::Store(_) => bail!(msg("log in")),
- Action::Erase => bail!(msg("log out")),
- }
- }
- // todo: PASETO with process
- let independent_of_endpoint = true;
- let action_str = match action {
- Action::Get => "get",
- Action::Store(_) => "store",
- Action::Erase => "erase",
- };
- let args: Vec<_> = args
- .iter()
- .map(|arg| {
- arg.replace("{action}", action_str)
- .replace("{index_url}", index_url)
- })
- .collect();
-
- let mut cmd = Command::new(&exe);
- cmd.args(args)
- .env(crate::CARGO_ENV, config.cargo_exe()?)
- .env("CARGO_REGISTRY_INDEX_URL", index_url);
- if sid.is_crates_io() {
- cmd.env("CARGO_REGISTRY_NAME_OPT", "crates-io");
- } else if let Some(name) = sid.alt_registry_key() {
- cmd.env("CARGO_REGISTRY_NAME_OPT", name);
- }
- match action {
- Action::Get => {
- cmd.stdout(Stdio::piped());
- }
- Action::Store(_) => {
- cmd.stdin(Stdio::piped());
- }
- Action::Erase => {}
- }
- let mut child = cmd.spawn().with_context(|| {
- let verb = match action {
- Action::Get => "fetch",
- Action::Store(_) => "store",
- Action::Erase => "erase",
- };
- format!(
- "failed to execute `{}` to {} authentication token for registry `{}`",
- exe.display(),
- verb,
- sid.display_registry_name(),
- )
- })?;
- let mut token = None;
- match &action {
- Action::Get => {
- let mut buffer = String::new();
- log::debug!("reading into buffer");
- child
- .stdout
- .as_mut()
- .unwrap()
- .read_to_string(&mut buffer)
- .with_context(|| {
- format!(
- "failed to read token from registry credential process `{}`",
- exe.display()
- )
- })?;
- if let Some(end) = buffer.find('\n') {
- if buffer.len() > end + 1 {
- bail!(
- "credential process `{}` returned more than one line of output; \
- expected a single token",
- exe.display()
- );
- }
- buffer.truncate(end);
- }
- token = Some((independent_of_endpoint, buffer));
- }
- Action::Store(token) => {
- writeln!(child.stdin.as_ref().unwrap(), "{}", token).with_context(|| {
- format!(
- "failed to send token to registry credential process `{}`",
- exe.display()
- )
- })?;
- }
- Action::Erase => {}
- }
- let status = child.wait().with_context(|| {
- format!(
- "registry credential process `{}` exit failure",
- exe.display()
- )
- })?;
- if !status.success() {
- let msg = match action {
- Action::Get => "failed to authenticate to registry",
- Action::Store(_) => "failed to store token to registry",
- Action::Erase => "failed to erase token from registry",
- };
- return Err(ProcessError::new(
- &format!(
- "registry credential process `{}` {} `{}`",
- exe.display(),
- msg,
- sid.display_registry_name()
- ),
- Some(status),
- None,
- )
- .into());
- }
- Ok(token)
-}
-
-/// Gets the path to the libexec processes in the sysroot.
-fn sysroot_credential(
- config: &Config,
- process: &(PathBuf, Vec<String>),
-) -> CargoResult<(PathBuf, Vec<String>)> {
- let cred_name = process.0.to_str().unwrap().strip_prefix("cargo:").unwrap();
- let cargo = config.cargo_exe()?;
- let root = cargo
- .parent()
- .and_then(|p| p.parent())
- .ok_or_else(|| format_err!("expected cargo path {}", cargo.display()))?;
- let exe = root.join("libexec").join(format!(
- "cargo-credential-{}{}",
- cred_name,
- std::env::consts::EXE_SUFFIX
- ));
- let mut args = process.1.clone();
- if !args.iter().any(|arg| arg == "{action}") {
- args.push("{action}".to_string());
- }
- Ok((exe, args))
+ Ok(())
}
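
A quick way to see the provider fallback behavior introduced in `credential_action` above: providers are tried in the configured order, `UrlNotSupported` and `NotFound` results fall through to the next provider, and any other error aborts immediately. The sketch below is stand-alone and uses hypothetical types, not Cargo's own.

#[derive(Debug)]
enum ProviderError {
    UrlNotSupported,
    NotFound,
    Other(String),
}

fn try_providers<F>(providers: &[&str], attempt: F) -> Result<String, String>
where
    F: Fn(&str) -> Result<String, ProviderError>,
{
    let mut any_not_found = false;
    for &provider in providers {
        match attempt(provider) {
            Ok(token) => return Ok(token),
            // This provider does not handle the registry; try the next one.
            Err(ProviderError::UrlNotSupported) => {}
            // No credential stored here; remember that and keep trying.
            Err(ProviderError::NotFound) => any_not_found = true,
            // Hard failure: stop immediately.
            Err(ProviderError::Other(e)) => return Err(e),
        }
    }
    if any_not_found {
        Err("no token found".to_string())
    } else {
        Err("no credential providers could handle the request".to_string())
    }
}

fn main() {
    let result = try_providers(&["cargo:token", "cargo:paseto"], |p| {
        if p == "cargo:paseto" {
            Ok("signed-token".to_string())
        } else {
            Err(ProviderError::NotFound)
        }
    });
    assert_eq!(result.unwrap(), "signed-token");
}
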
diff --git a/src/tools/cargo/src/cargo/util/command_prelude.rs b/src/tools/cargo/src/cargo/util/command_prelude.rs
index 46ed7dd7c..bc707ef6f 100644
--- a/src/tools/cargo/src/cargo/util/command_prelude.rs
+++ b/src/tools/cargo/src/cargo/util/command_prelude.rs
@@ -26,6 +26,14 @@ pub use clap::Command;
use super::config::JobsConfig;
+pub mod heading {
+ pub const PACKAGE_SELECTION: &str = "Package Selection";
+ pub const TARGET_SELECTION: &str = "Target Selection";
+ pub const FEATURE_SELECTION: &str = "Feature Selection";
+ pub const COMPILATION_OPTIONS: &str = "Compilation Options";
+ pub const MANIFEST_OPTIONS: &str = "Manifest Options";
+}
+
pub trait CommandExt: Sized {
fn _arg(self, arg: Arg) -> Self;
@@ -37,8 +45,10 @@ pub trait CommandExt: Sized {
all: &'static str,
exclude: &'static str,
) -> Self {
- self.arg_package_spec_no_all(package, all, exclude)
- ._arg(flag("all", "Alias for --workspace (deprecated)"))
+ self.arg_package_spec_no_all(package, all, exclude)._arg(
+ flag("all", "Alias for --workspace (deprecated)")
+ .help_heading(heading::PACKAGE_SELECTION),
+ )
}
/// Variant of arg_package_spec that does not include the `--all` flag
@@ -51,33 +61,45 @@ pub trait CommandExt: Sized {
exclude: &'static str,
) -> Self {
self.arg_package_spec_simple(package)
- ._arg(flag("workspace", all))
- ._arg(multi_opt("exclude", "SPEC", exclude))
+ ._arg(flag("workspace", all).help_heading(heading::PACKAGE_SELECTION))
+ ._arg(multi_opt("exclude", "SPEC", exclude).help_heading(heading::PACKAGE_SELECTION))
}
fn arg_package_spec_simple(self, package: &'static str) -> Self {
- self._arg(optional_multi_opt("package", "SPEC", package).short('p'))
+ self._arg(
+ optional_multi_opt("package", "SPEC", package)
+ .short('p')
+ .help_heading(heading::PACKAGE_SELECTION),
+ )
}
fn arg_package(self, package: &'static str) -> Self {
self._arg(
optional_opt("package", package)
.short('p')
- .value_name("SPEC"),
+ .value_name("SPEC")
+ .help_heading(heading::PACKAGE_SELECTION),
)
}
fn arg_jobs(self) -> Self {
+ self.arg_jobs_without_keep_going()._arg(
+ flag(
+ "keep-going",
+ "Do not abort the build as soon as there is an error (unstable)",
+ )
+ .help_heading(heading::COMPILATION_OPTIONS),
+ )
+ }
+
+ fn arg_jobs_without_keep_going(self) -> Self {
self._arg(
opt("jobs", "Number of parallel jobs, defaults to # of CPUs.")
.short('j')
.value_name("N")
- .allow_hyphen_values(true),
+ .allow_hyphen_values(true)
+ .help_heading(heading::COMPILATION_OPTIONS),
)
- ._arg(flag(
- "keep-going",
- "Do not abort the build as soon as there is an error (unstable)",
- ))
}
fn arg_targets_all(
@@ -94,11 +116,13 @@ pub trait CommandExt: Sized {
all: &'static str,
) -> Self {
self.arg_targets_lib_bin_example(lib, bin, bins, example, examples)
- ._arg(flag("tests", tests))
- ._arg(optional_multi_opt("test", "NAME", test))
- ._arg(flag("benches", benches))
- ._arg(optional_multi_opt("bench", "NAME", bench))
- ._arg(flag("all-targets", all))
+ ._arg(flag("tests", tests).help_heading(heading::TARGET_SELECTION))
+ ._arg(optional_multi_opt("test", "NAME", test).help_heading(heading::TARGET_SELECTION))
+ ._arg(flag("benches", benches).help_heading(heading::TARGET_SELECTION))
+ ._arg(
+ optional_multi_opt("bench", "NAME", bench).help_heading(heading::TARGET_SELECTION),
+ )
+ ._arg(flag("all-targets", all).help_heading(heading::TARGET_SELECTION))
}
fn arg_targets_lib_bin_example(
@@ -109,11 +133,14 @@ pub trait CommandExt: Sized {
example: &'static str,
examples: &'static str,
) -> Self {
- self._arg(flag("lib", lib))
- ._arg(flag("bins", bins))
- ._arg(optional_multi_opt("bin", "NAME", bin))
- ._arg(flag("examples", examples))
- ._arg(optional_multi_opt("example", "NAME", example))
+ self._arg(flag("lib", lib).help_heading(heading::TARGET_SELECTION))
+ ._arg(flag("bins", bins).help_heading(heading::TARGET_SELECTION))
+ ._arg(optional_multi_opt("bin", "NAME", bin).help_heading(heading::TARGET_SELECTION))
+ ._arg(flag("examples", examples).help_heading(heading::TARGET_SELECTION))
+ ._arg(
+ optional_multi_opt("example", "NAME", example)
+ .help_heading(heading::TARGET_SELECTION),
+ )
}
fn arg_targets_bins_examples(
@@ -123,15 +150,21 @@ pub trait CommandExt: Sized {
example: &'static str,
examples: &'static str,
) -> Self {
- self._arg(optional_multi_opt("bin", "NAME", bin))
- ._arg(flag("bins", bins))
- ._arg(optional_multi_opt("example", "NAME", example))
- ._arg(flag("examples", examples))
+ self._arg(optional_multi_opt("bin", "NAME", bin).help_heading(heading::TARGET_SELECTION))
+ ._arg(flag("bins", bins).help_heading(heading::TARGET_SELECTION))
+ ._arg(
+ optional_multi_opt("example", "NAME", example)
+ .help_heading(heading::TARGET_SELECTION),
+ )
+ ._arg(flag("examples", examples).help_heading(heading::TARGET_SELECTION))
}
fn arg_targets_bin_example(self, bin: &'static str, example: &'static str) -> Self {
- self._arg(optional_multi_opt("bin", "NAME", bin))
- ._arg(optional_multi_opt("example", "NAME", example))
+ self._arg(optional_multi_opt("bin", "NAME", bin).help_heading(heading::TARGET_SELECTION))
+ ._arg(
+ optional_multi_opt("example", "NAME", example)
+ .help_heading(heading::TARGET_SELECTION),
+ )
}
fn arg_features(self) -> Self {
@@ -141,21 +174,36 @@ pub trait CommandExt: Sized {
"FEATURES",
"Space or comma separated list of features to activate",
)
- .short('F'),
+ .short('F')
+ .help_heading(heading::FEATURE_SELECTION),
+ )
+ ._arg(
+ flag("all-features", "Activate all available features")
+ .help_heading(heading::FEATURE_SELECTION),
+ )
+ ._arg(
+ flag(
+ "no-default-features",
+ "Do not activate the `default` feature",
+ )
+ .help_heading(heading::FEATURE_SELECTION),
)
- ._arg(flag("all-features", "Activate all available features"))
- ._arg(flag(
- "no-default-features",
- "Do not activate the `default` feature",
- ))
}
fn arg_release(self, release: &'static str) -> Self {
- self._arg(flag("release", release).short('r'))
+ self._arg(
+ flag("release", release)
+ .short('r')
+ .help_heading(heading::COMPILATION_OPTIONS),
+ )
}
fn arg_profile(self, profile: &'static str) -> Self {
- self._arg(opt("profile", profile).value_name("PROFILE-NAME"))
+ self._arg(
+ opt("profile", profile)
+ .value_name("PROFILE-NAME")
+ .help_heading(heading::COMPILATION_OPTIONS),
+ )
}
fn arg_doc(self, doc: &'static str) -> Self {
@@ -163,17 +211,23 @@ pub trait CommandExt: Sized {
}
fn arg_target_triple(self, target: &'static str) -> Self {
- self._arg(multi_opt("target", "TRIPLE", target))
+ self._arg(multi_opt("target", "TRIPLE", target).help_heading(heading::COMPILATION_OPTIONS))
}
fn arg_target_dir(self) -> Self {
self._arg(
- opt("target-dir", "Directory for all generated artifacts").value_name("DIRECTORY"),
+ opt("target-dir", "Directory for all generated artifacts")
+ .value_name("DIRECTORY")
+ .help_heading(heading::COMPILATION_OPTIONS),
)
}
fn arg_manifest_path(self) -> Self {
- self._arg(opt("manifest-path", "Path to Cargo.toml").value_name("PATH"))
+ self._arg(
+ opt("manifest-path", "Path to Cargo.toml")
+ .value_name("PATH")
+ .help_heading(heading::MANIFEST_OPTIONS),
+ )
}
fn arg_message_format(self) -> Self {
@@ -181,14 +235,17 @@ pub trait CommandExt: Sized {
}
fn arg_build_plan(self) -> Self {
- self._arg(flag(
- "build-plan",
- "Output the build plan in JSON (unstable)",
- ))
+ self._arg(
+ flag("build-plan", "Output the build plan in JSON (unstable)")
+ .help_heading(heading::COMPILATION_OPTIONS),
+ )
}
fn arg_unit_graph(self) -> Self {
- self._arg(flag("unit-graph", "Output build graph in JSON (unstable)"))
+ self._arg(
+ flag("unit-graph", "Output build graph in JSON (unstable)")
+ .help_heading(heading::COMPILATION_OPTIONS),
+ )
}
fn arg_new_opts(self) -> Self {
@@ -252,7 +309,8 @@ pub trait CommandExt: Sized {
"Timing output formats (unstable) (comma separated): html, json",
)
.value_name("FMTS")
- .require_equals(true),
+ .require_equals(true)
+ .help_heading(heading::COMPILATION_OPTIONS),
)
}
}
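
The effect of the `help_heading` calls added above can be seen with a minimal sketch (assuming clap 4; the command and flag names are illustrative, not Cargo's builders): flags given the same heading are grouped under that title in `--help` output instead of one flat options list.

use clap::{Arg, ArgAction, Command};

fn main() {
    let cmd = Command::new("demo")
        .arg(
            Arg::new("release")
                .long("release")
                .short('r')
                .action(ArgAction::SetTrue)
                .help("Build artifacts in release mode")
                .help_heading("Compilation Options"),
        )
        .arg(
            Arg::new("lib")
                .long("lib")
                .action(ArgAction::SetTrue)
                .help("Build only this package's library")
                .help_heading("Target Selection"),
        );
    // `--help` now lists --release under "Compilation Options" and --lib
    // under "Target Selection".
    cmd.clone().print_help().unwrap();
    let matches = cmd.get_matches_from(["demo", "--release"]);
    assert!(matches.get_flag("release"));
}
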
diff --git a/src/tools/cargo/src/cargo/util/config/mod.rs b/src/tools/cargo/src/cargo/util/config/mod.rs
index 4e6bca302..cf977d38d 100644
--- a/src/tools/cargo/src/cargo/util/config/mod.rs
+++ b/src/tools/cargo/src/cargo/util/config/mod.rs
@@ -70,20 +70,21 @@ use crate::core::compiler::rustdoc::RustdocExternMap;
use crate::core::shell::Verbosity;
use crate::core::{features, CliUnstable, Shell, SourceId, Workspace, WorkspaceRootConfig};
use crate::ops::RegistryCredentialConfig;
-use crate::util::auth::Secret;
use crate::util::errors::CargoResult;
use crate::util::network::http::configure_http_handle;
use crate::util::network::http::http_handle;
-use crate::util::CanonicalUrl;
-use crate::util::{internal, toml as cargo_toml};
+use crate::util::toml as cargo_toml;
+use crate::util::{internal, CanonicalUrl};
use crate::util::{try_canonicalize, validate_package_name};
use crate::util::{FileLock, Filesystem, IntoUrl, IntoUrlWithBase, Rustc};
use anyhow::{anyhow, bail, format_err, Context as _};
+use cargo_credential::Secret;
use cargo_util::paths;
use curl::easy::Easy;
use lazycell::LazyCell;
use serde::de::IntoDeserializer as _;
use serde::Deserialize;
+use time::OffsetDateTime;
use toml_edit::Item;
use url::Url;
@@ -105,6 +106,8 @@ pub use target::{TargetCfgConfig, TargetConfig};
mod environment;
use environment::Env;
+use super::auth::RegistryConfig;
+
// Helper macro for creating typed access methods.
macro_rules! get_value_typed {
($name:ident, $ty:ty, $variant:ident, $expected:expr) => {
@@ -146,13 +149,9 @@ enum WhyLoad {
/// A previously generated authentication token and the data needed to determine if it can be reused.
#[derive(Debug)]
pub struct CredentialCacheValue {
- /// If the command line was used to override the token then it must always be reused,
- /// even if reading the configuration files would lead to a different value.
- pub from_commandline: bool,
- /// If nothing depends on which endpoint is being hit, then we can reuse the token
- /// for any future request even if some of the requests involve mutations.
- pub independent_of_endpoint: bool,
pub token_value: Secret<String>,
+ pub expiration: Option<OffsetDateTime>,
+ pub operation_independent: bool,
}
/// Configuration information for cargo. This is not specific to a build, it is information
@@ -211,6 +210,8 @@ pub struct Config {
/// Cache of credentials from configuration or credential providers.
/// Maps from url to credential value.
credential_cache: LazyCell<RefCell<HashMap<CanonicalUrl, CredentialCacheValue>>>,
+    /// Cache of registry config from the `[registries]` table.
+ registry_config: LazyCell<RefCell<HashMap<SourceId, Option<RegistryConfig>>>>,
/// Lock, if held, of the global package cache along with the number of
/// acquisitions so far.
package_cache_lock: RefCell<Option<(Option<FileLock>, usize)>>,
@@ -302,6 +303,7 @@ impl Config {
env,
updated_sources: LazyCell::new(),
credential_cache: LazyCell::new(),
+ registry_config: LazyCell::new(),
package_cache_lock: RefCell::new(None),
http_config: LazyCell::new(),
future_incompat_config: LazyCell::new(),
@@ -491,6 +493,13 @@ impl Config {
.borrow_mut()
}
+ /// Cache of already parsed registries from the `[registries]` table.
+ pub(crate) fn registry_config(&self) -> RefMut<'_, HashMap<SourceId, Option<RegistryConfig>>> {
+ self.registry_config
+ .borrow_with(|| RefCell::new(HashMap::new()))
+ .borrow_mut()
+ }
+
/// Gets all config values from disk.
///
/// This will lazy-load the values as necessary. Callers are responsible
@@ -602,7 +611,7 @@ impl Config {
key: &ConfigKey,
vals: &HashMap<String, ConfigValue>,
) -> CargoResult<Option<ConfigValue>> {
- log::trace!("get cv {:?}", key);
+ tracing::trace!("get cv {:?}", key);
if key.is_root() {
// Returning the entire root table (for example `cargo config get`
// with no key). The definition here shouldn't matter.
@@ -810,7 +819,7 @@ impl Config {
///
/// See `get` for more details.
pub fn get_string(&self, key: &str) -> CargoResult<OptValue<String>> {
- self.get::<Option<Value<String>>>(key)
+ self.get::<OptValue<String>>(key)
}
/// Get a config value that is expected to be a path.
@@ -819,7 +828,7 @@ impl Config {
/// directory separators. See `ConfigRelativePath::resolve_program` for
/// more details.
pub fn get_path(&self, key: &str) -> CargoResult<OptValue<PathBuf>> {
- self.get::<Option<Value<ConfigRelativePath>>>(key).map(|v| {
+ self.get::<OptValue<ConfigRelativePath>>(key).map(|v| {
v.map(|v| Value {
val: v.val.resolve_program(self),
definition: v.definition,
@@ -2789,7 +2798,7 @@ fn disables_multiplexing_for_bad_curl(
.iter()
.any(|v| curl_version.starts_with(v))
{
- log::info!("disabling multiplexing with proxy, curl version is {curl_version}");
+ tracing::info!("disabling multiplexing with proxy, curl version is {curl_version}");
http.multiplexing = Some(false);
}
}
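
The new `registry_config` cache follows the same lazily-initialized, interior-mutable pattern as the existing `credential_cache`. A minimal stand-alone sketch of that pattern (assuming the `lazycell` crate; the `Caches` struct and `String` keys are illustrative):

use std::cell::{RefCell, RefMut};
use std::collections::HashMap;

use lazycell::LazyCell;

struct Caches {
    // Created on first use; RefCell allows mutation through a shared reference.
    registry_config: LazyCell<RefCell<HashMap<String, u32>>>,
}

impl Caches {
    fn registry_config(&self) -> RefMut<'_, HashMap<String, u32>> {
        self.registry_config
            .borrow_with(|| RefCell::new(HashMap::new()))
            .borrow_mut()
    }
}

fn main() {
    let caches = Caches {
        registry_config: LazyCell::new(),
    };
    caches.registry_config().insert("crates-io".to_string(), 1);
    assert_eq!(caches.registry_config().get("crates-io"), Some(&1));
}
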
diff --git a/src/tools/cargo/src/cargo/util/config/path.rs b/src/tools/cargo/src/cargo/util/config/path.rs
index a90cab2b2..bc53ffcfa 100644
--- a/src/tools/cargo/src/cargo/util/config/path.rs
+++ b/src/tools/cargo/src/cargo/util/config/path.rs
@@ -10,6 +10,10 @@ use std::path::PathBuf;
pub struct ConfigRelativePath(Value<String>);
impl ConfigRelativePath {
+ pub fn new(path: Value<String>) -> ConfigRelativePath {
+ ConfigRelativePath(path)
+ }
+
/// Returns the underlying value.
pub fn value(&self) -> &Value<String> {
&self.0
@@ -49,7 +53,7 @@ impl ConfigRelativePath {
///
/// Typically you should use `ConfigRelativePath::resolve_program` on the path
/// to get the actual program.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, PartialEq)]
pub struct PathAndArgs {
pub path: ConfigRelativePath,
pub args: Vec<String>,
@@ -76,3 +80,22 @@ impl<'de> serde::Deserialize<'de> for PathAndArgs {
})
}
}
+
+impl PathAndArgs {
+ /// Construct a PathAndArgs from a string. The string will be split on ascii whitespace,
+ /// with the first item being treated as a `ConfigRelativePath` to the executable, and subsequent
+ /// items as arguments.
+ pub fn from_whitespace_separated_string(p: &Value<String>) -> PathAndArgs {
+ let mut iter = p.val.split_ascii_whitespace().map(str::to_string);
+ let val = iter.next().unwrap_or_default();
+ let args = iter.collect();
+ let crp = Value {
+ val,
+ definition: p.definition.clone(),
+ };
+ PathAndArgs {
+ path: ConfigRelativePath(crp),
+ args,
+ }
+ }
+}
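
The splitting rule implemented by `from_whitespace_separated_string` is simple: the first whitespace-separated word is the program (or built-in provider name) and the remaining words are its arguments. A stand-alone sketch of just that rule, without Cargo's `Value` wrapper (the provider path shown is an example input):

fn split_provider(spec: &str) -> (String, Vec<String>) {
    let mut parts = spec.split_ascii_whitespace().map(str::to_string);
    // First word is the executable or provider name, the rest are its arguments.
    let path = parts.next().unwrap_or_default();
    (path, parts.collect())
}

fn main() {
    let (path, args) = split_provider("/usr/libexec/my-provider --format plain");
    assert_eq!(path, "/usr/libexec/my-provider");
    assert_eq!(args, vec!["--format", "plain"]);
}
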
diff --git a/src/tools/cargo/src/cargo/util/config/target.rs b/src/tools/cargo/src/cargo/util/config/target.rs
index cdafe73dd..b8aaf906d 100644
--- a/src/tools/cargo/src/cargo/util/config/target.rs
+++ b/src/tools/cargo/src/cargo/util/config/target.rs
@@ -45,7 +45,7 @@ pub(super) fn load_target_cfgs(config: &Config) -> CargoResult<Vec<(String, Targ
// rebuilds. We may perhaps one day wish to ensure a deterministic
// ordering via the order keys were defined in files perhaps.
let target: BTreeMap<String, TargetCfgConfig> = config.get("target")?;
- log::debug!("Got all targets {:#?}", target);
+ tracing::debug!("Got all targets {:#?}", target);
for (key, cfg) in target {
if key.starts_with("cfg(") {
// Unfortunately this is not able to display the location of the
diff --git a/src/tools/cargo/src/cargo/util/credential/adaptor.rs b/src/tools/cargo/src/cargo/util/credential/adaptor.rs
new file mode 100644
index 000000000..693e653b5
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/credential/adaptor.rs
@@ -0,0 +1,68 @@
+//! Credential provider that launches an external process that only outputs a credential
+
+use std::{
+ io::Read,
+ process::{Command, Stdio},
+};
+
+use anyhow::Context;
+use cargo_credential::{
+ Action, CacheControl, Credential, CredentialResponse, RegistryInfo, Secret,
+};
+
+pub struct BasicProcessCredential {}
+
+impl Credential for BasicProcessCredential {
+ fn perform(
+ &self,
+ registry: &RegistryInfo<'_>,
+ action: &Action<'_>,
+ args: &[&str],
+ ) -> Result<CredentialResponse, cargo_credential::Error> {
+ match action {
+ Action::Get(_) => {
+ let mut args = args.iter();
+ let exe = args.next()
+ .ok_or("The first argument to `cargo:token-from-stdout` must be a command that prints a token on stdout")?;
+ let args = args.map(|arg| arg.replace("{index_url}", registry.index_url));
+
+ let mut cmd = Command::new(exe);
+ cmd.args(args)
+ .env("CARGO_REGISTRY_INDEX_URL", registry.index_url);
+ if let Some(name) = registry.name {
+ cmd.env("CARGO_REGISTRY_NAME_OPT", name);
+ }
+ cmd.stdout(Stdio::piped());
+ let mut child = cmd.spawn().context("failed to spawn credential process")?;
+ let mut buffer = String::new();
+ child
+ .stdout
+ .take()
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .context("failed to read from credential provider")?;
+ if let Some(end) = buffer.find('\n') {
+ if buffer.len() > end + 1 {
+ return Err(format!(
+ "process `{}` returned more than one line of output; \
+ expected a single token",
+ exe
+ )
+ .into());
+ }
+ buffer.truncate(end);
+ }
+ let status = child.wait().context("credential process never started")?;
+ if !status.success() {
+ return Err(format!("process `{}` failed with status `{status}`", exe).into());
+ }
+ Ok(CredentialResponse::Get {
+ token: Secret::from(buffer),
+ cache: CacheControl::Session,
+ operation_independent: true,
+ })
+ }
+ _ => Err(cargo_credential::Error::OperationNotSupported),
+ }
+ }
+}
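
A helper usable with the `cargo:token-from-stdout` adaptor above only needs to print the token as a single line on stdout; the adaptor passes the registry's index URL in `CARGO_REGISTRY_INDEX_URL` and rejects any extra output lines. A hypothetical helper might look like this:

use std::env;

fn main() {
    // Provided by the adaptor; a real helper could use it to select the right
    // entry in a keyring or secrets store.
    let index_url = env::var("CARGO_REGISTRY_INDEX_URL").unwrap_or_default();
    eprintln!("looking up token for {index_url}");
    // Exactly one line on stdout: anything more is rejected by the adaptor.
    println!("example-token");
}
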
diff --git a/src/tools/cargo/src/cargo/util/credential/mod.rs b/src/tools/cargo/src/cargo/util/credential/mod.rs
new file mode 100644
index 000000000..7baf7d2a1
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/credential/mod.rs
@@ -0,0 +1,8 @@
+//! Built-in Cargo credential providers
+
+#![allow(clippy::print_stderr)]
+
+pub mod adaptor;
+pub mod paseto;
+pub mod process;
+pub mod token;
diff --git a/src/tools/cargo/src/cargo/util/credential/paseto.rs b/src/tools/cargo/src/cargo/util/credential/paseto.rs
new file mode 100644
index 000000000..329d1f11c
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/credential/paseto.rs
@@ -0,0 +1,224 @@
+//! Credential provider that implements PASETO asymmetric tokens stored in Cargo's config.
+
+use anyhow::Context;
+use cargo_credential::{
+ Action, CacheControl, Credential, CredentialResponse, Error, Operation, RegistryInfo, Secret,
+};
+use clap::Command;
+use pasetors::{
+ keys::{AsymmetricKeyPair, AsymmetricPublicKey, AsymmetricSecretKey, Generate},
+ paserk::FormatAsPaserk,
+};
+use time::{format_description::well_known::Rfc3339, OffsetDateTime};
+use url::Url;
+
+use crate::{
+ core::SourceId,
+ ops::RegistryCredentialConfig,
+ util::{auth::registry_credential_config_raw, command_prelude::opt, config},
+ Config,
+};
+
+/// The main body of an asymmetric token as described in RFC 3231.
+#[derive(serde::Serialize)]
+struct Message<'a> {
+ iat: &'a str,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ sub: Option<&'a str>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ mutation: Option<&'a str>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ name: Option<&'a str>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ vers: Option<&'a str>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ cksum: Option<&'a str>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ challenge: Option<&'a str>,
+ /// This field is not yet used. This field can be set to a value >1 to indicate a breaking change in the token format.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ v: Option<u8>,
+}
+/// The footer of an asymmetric token as described in RFC 3231.
+#[derive(serde::Serialize)]
+struct Footer<'a> {
+ url: &'a str,
+ kip: pasetors::paserk::Id,
+}
+
+pub(crate) struct PasetoCredential<'a> {
+ config: &'a Config,
+}
+
+impl<'a> PasetoCredential<'a> {
+ pub fn new(config: &'a Config) -> Self {
+ Self { config }
+ }
+}
+
+impl<'a> Credential for PasetoCredential<'a> {
+ fn perform(
+ &self,
+ registry: &RegistryInfo<'_>,
+ action: &Action<'_>,
+ args: &[&str],
+ ) -> Result<CredentialResponse, Error> {
+ let index_url = Url::parse(registry.index_url).context("parsing index url")?;
+ let sid = if let Some(name) = registry.name {
+ SourceId::for_alt_registry(&index_url, name)
+ } else {
+ SourceId::for_registry(&index_url)
+ }?;
+
+ let reg_cfg = registry_credential_config_raw(self.config, &sid)?;
+
+ let matches = Command::new("cargo:paseto")
+ .no_binary_name(true)
+ .arg(opt("key-subject", "Set the key subject for this registry").value_name("SUBJECT"))
+ .try_get_matches_from(args)
+ .map_err(Box::new)?;
+ let key_subject = matches.get_one("key-subject").map(String::as_str);
+
+ match action {
+ Action::Get(operation) => {
+ let Some(reg_cfg) = reg_cfg else {
+ return Err(Error::NotFound);
+ };
+ let Some(secret_key) = reg_cfg.secret_key.as_ref() else {
+ return Err(Error::NotFound);
+ };
+
+ let secret_key_subject = reg_cfg.secret_key_subject;
+ let secret: Secret<AsymmetricSecretKey<pasetors::version3::V3>> = secret_key
+ .val
+ .as_ref()
+ .map(|key| key.as_str().try_into())
+ .transpose()
+ .context("failed to load private key")?;
+ let public: AsymmetricPublicKey<pasetors::version3::V3> = secret
+ .as_ref()
+ .map(|key| key.try_into())
+ .transpose()
+ .context("failed to load public key from private key")?
+ .expose();
+ let kip: pasetors::paserk::Id = (&public).into();
+
+ let iat = OffsetDateTime::now_utc();
+
+ let message = Message {
+ iat: &iat.format(&Rfc3339).unwrap(),
+ sub: secret_key_subject.as_deref(),
+ mutation: match operation {
+ Operation::Publish { .. } => Some("publish"),
+ Operation::Yank { .. } => Some("yank"),
+ Operation::Unyank { .. } => Some("unyank"),
+ Operation::Owners { .. } => Some("owners"),
+ _ => None,
+ },
+ name: match operation {
+ Operation::Publish { name, .. }
+ | Operation::Yank { name, .. }
+ | Operation::Unyank { name, .. }
+ | Operation::Owners { name, .. } => Some(name),
+ _ => None,
+ },
+ vers: match operation {
+ Operation::Publish { vers, .. }
+ | Operation::Yank { vers, .. }
+ | Operation::Unyank { vers, .. } => Some(vers),
+ _ => None,
+ },
+ cksum: match operation {
+ Operation::Publish { cksum, .. } => Some(cksum),
+ _ => None,
+ },
+ challenge: None, // todo: PASETO with challenges
+ v: None,
+ };
+ let footer = Footer {
+ url: &registry.index_url,
+ kip,
+ };
+
+ // Only read operations can be cached with asymmetric tokens.
+ let cache = match operation {
+ Operation::Read => CacheControl::Session,
+ _ => CacheControl::Never,
+ };
+
+ let token = secret
+ .map(|secret| {
+ pasetors::version3::PublicToken::sign(
+ &secret,
+ serde_json::to_string(&message)
+ .expect("cannot serialize")
+ .as_bytes(),
+ Some(
+ serde_json::to_string(&footer)
+ .expect("cannot serialize")
+ .as_bytes(),
+ ),
+ None,
+ )
+ })
+ .transpose()
+ .context("failed to sign request")?;
+
+ Ok(CredentialResponse::Get {
+ token,
+ cache,
+ operation_independent: false,
+ })
+ }
+ Action::Login(options) => {
+ let old_key_subject = reg_cfg.and_then(|cfg| cfg.secret_key_subject);
+ let new_token;
+ let secret_key: Secret<String>;
+ if let Some(key) = &options.token {
+ secret_key = key.clone().map(str::to_string);
+ } else {
+ let kp = AsymmetricKeyPair::<pasetors::version3::V3>::generate().unwrap();
+ secret_key = Secret::default().map(|mut key| {
+ FormatAsPaserk::fmt(&kp.secret, &mut key).unwrap();
+ key
+ });
+ }
+
+ if let Some(p) = paserk_public_from_paserk_secret(secret_key.as_deref()) {
+ eprintln!("{}", &p);
+ } else {
+ return Err("not a validly formatted PASERK secret key".into());
+ }
+ new_token = RegistryCredentialConfig::AsymmetricKey((
+ secret_key,
+ match key_subject {
+ Some(key_subject) => Some(key_subject.to_string()),
+ None => old_key_subject,
+ },
+ ));
+ config::save_credentials(self.config, Some(new_token), &sid)?;
+ Ok(CredentialResponse::Login)
+ }
+ Action::Logout => {
+ config::save_credentials(self.config, None, &sid)?;
+ Ok(CredentialResponse::Logout)
+ }
+ _ => Err(Error::OperationNotSupported),
+ }
+ }
+}
+
+/// Checks that a secret key is valid, and returns the associated public key in Paserk format.
+pub(crate) fn paserk_public_from_paserk_secret(secret_key: Secret<&str>) -> Option<String> {
+ let secret: Secret<AsymmetricSecretKey<pasetors::version3::V3>> =
+ secret_key.map(|key| key.try_into()).transpose().ok()?;
+ let public: AsymmetricPublicKey<pasetors::version3::V3> = secret
+ .as_ref()
+ .map(|key| key.try_into())
+ .transpose()
+ .ok()?
+ .expose();
+ let mut paserk_pub_key = String::new();
+ FormatAsPaserk::fmt(&public, &mut paserk_pub_key).unwrap();
+ Some(paserk_pub_key)
+}
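
The claims carried in the token body above depend on the operation: optional fields are skipped when `None`, so a plain read-only token may carry only `iat`, while a publish token also names the crate, version, and checksum. A stand-alone serde sketch of that shape (a field subset for illustration, assuming `serde` with the derive feature and `serde_json`):

use serde::Serialize;

#[derive(Serialize)]
struct Message<'a> {
    iat: &'a str,
    #[serde(skip_serializing_if = "Option::is_none")]
    mutation: Option<&'a str>,
    #[serde(skip_serializing_if = "Option::is_none")]
    name: Option<&'a str>,
    #[serde(skip_serializing_if = "Option::is_none")]
    vers: Option<&'a str>,
}

fn main() {
    let read = Message { iat: "2023-07-01T00:00:00Z", mutation: None, name: None, vers: None };
    let publish = Message {
        iat: "2023-07-01T00:00:00Z",
        mutation: Some("publish"),
        name: Some("foo"),
        vers: Some("0.1.0"),
    };
    // {"iat":"2023-07-01T00:00:00Z"}
    println!("{}", serde_json::to_string(&read).unwrap());
    // {"iat":"2023-07-01T00:00:00Z","mutation":"publish","name":"foo","vers":"0.1.0"}
    println!("{}", serde_json::to_string(&publish).unwrap());
}
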
diff --git a/src/tools/cargo/src/cargo/util/credential/process.rs b/src/tools/cargo/src/cargo/util/credential/process.rs
new file mode 100644
index 000000000..89eac1af6
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/credential/process.rs
@@ -0,0 +1,80 @@
+//! Credential provider that launches an external process using Cargo's credential
+//! protocol.
+
+use std::{
+ io::{BufRead, BufReader, Write},
+ path::PathBuf,
+ process::{Command, Stdio},
+};
+
+use anyhow::Context;
+use cargo_credential::{
+ Action, Credential, CredentialHello, CredentialRequest, CredentialResponse, RegistryInfo,
+};
+
+pub struct CredentialProcessCredential {
+ path: PathBuf,
+}
+
+impl<'a> CredentialProcessCredential {
+ pub fn new(path: &str) -> Self {
+ Self {
+ path: PathBuf::from(path),
+ }
+ }
+}
+
+impl<'a> Credential for CredentialProcessCredential {
+ fn perform(
+ &self,
+ registry: &RegistryInfo<'_>,
+ action: &Action<'_>,
+ args: &[&str],
+ ) -> Result<CredentialResponse, cargo_credential::Error> {
+ let mut cmd = Command::new(&self.path);
+ cmd.stdout(Stdio::piped());
+ cmd.stdin(Stdio::piped());
+ cmd.arg("--cargo-plugin");
+ tracing::debug!("credential-process: {cmd:?}");
+ let mut child = cmd.spawn().context("failed to spawn credential process")?;
+ let mut output_from_child = BufReader::new(child.stdout.take().unwrap());
+ let mut input_to_child = child.stdin.take().unwrap();
+ let mut buffer = String::new();
+ output_from_child
+ .read_line(&mut buffer)
+ .context("failed to read hello from credential provider")?;
+ let credential_hello: CredentialHello =
+ serde_json::from_str(&buffer).context("failed to deserialize hello")?;
+ tracing::debug!("credential-process > {credential_hello:?}");
+
+ let req = CredentialRequest {
+ v: cargo_credential::PROTOCOL_VERSION_1,
+ action: action.clone(),
+ registry: registry.clone(),
+ args: args.to_vec(),
+ };
+ let request = serde_json::to_string(&req).context("failed to serialize request")?;
+ tracing::debug!("credential-process < {req:?}");
+ writeln!(input_to_child, "{request}").context("failed to write to credential provider")?;
+
+ buffer.clear();
+ output_from_child
+ .read_line(&mut buffer)
+ .context("failed to read response from credential provider")?;
+ let response: Result<CredentialResponse, cargo_credential::Error> =
+ serde_json::from_str(&buffer).context("failed to deserialize response")?;
+ tracing::debug!("credential-process > {response:?}");
+ drop(input_to_child);
+ let status = child.wait().context("credential process never started")?;
+ if !status.success() {
+ return Err(anyhow::anyhow!(
+                "credential process `{}` failed with status {}",
+ self.path.display(),
+ status
+ )
+ .into());
+ }
+ tracing::trace!("credential process exited successfully");
+ response
+ }
+}
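
The counterpart to this code is a provider executable that prints one hello line and then answers one JSON request per line. Below is a minimal sketch of that side of the protocol, assuming `serde_json` is available; the exact wire field names (`"v"`, the `Ok`/`Err` wrapping, the error shape) are assumptions here, not taken from this diff.

```rust
use std::io::{self, BufRead, Write};

fn main() -> io::Result<()> {
    let stdin = io::stdin();
    let mut stdout = io::stdout();

    // Hello line: advertise which protocol versions this provider supports.
    writeln!(stdout, r#"{{"v":[1]}}"#)?;
    stdout.flush()?;

    // One JSON request per line in, one JSON response per line out.
    for line in stdin.lock().lines() {
        let request: serde_json::Value =
            serde_json::from_str(&line?).unwrap_or(serde_json::Value::Null);
        eprintln!("request: {request}");

        // Decline every action; a real provider would inspect the action kind
        // and return a token for `get`. The error shape is illustrative only.
        writeln!(stdout, r#"{{"Err":{{"kind":"operation-not-supported"}}}}"#)?;
        stdout.flush()?;
    }
    Ok(())
}
```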
diff --git a/src/tools/cargo/src/cargo/util/credential/token.rs b/src/tools/cargo/src/cargo/util/credential/token.rs
new file mode 100644
index 000000000..7a29e6360
--- /dev/null
+++ b/src/tools/cargo/src/cargo/util/credential/token.rs
@@ -0,0 +1,96 @@
+//! Credential provider that uses plaintext tokens in Cargo's config.
+
+use anyhow::Context;
+use cargo_credential::{Action, CacheControl, Credential, CredentialResponse, Error, RegistryInfo};
+use url::Url;
+
+use crate::{
+ core::SourceId,
+ ops::RegistryCredentialConfig,
+ util::{auth::registry_credential_config_raw, config},
+ Config,
+};
+
+pub struct TokenCredential<'a> {
+ config: &'a Config,
+}
+
+impl<'a> TokenCredential<'a> {
+ pub fn new(config: &'a Config) -> Self {
+ Self { config }
+ }
+}
+
+impl<'a> Credential for TokenCredential<'a> {
+ fn perform(
+ &self,
+ registry: &RegistryInfo<'_>,
+ action: &Action<'_>,
+ _args: &[&str],
+ ) -> Result<CredentialResponse, Error> {
+ let index_url = Url::parse(registry.index_url).context("parsing index url")?;
+ let sid = if let Some(name) = registry.name {
+ SourceId::for_alt_registry(&index_url, name)
+ } else {
+ SourceId::for_registry(&index_url)
+ }?;
+ let previous_token =
+ registry_credential_config_raw(self.config, &sid)?.and_then(|c| c.token);
+
+ match action {
+ Action::Get(_) => {
+ let token = previous_token.ok_or_else(|| Error::NotFound)?.val;
+ Ok(CredentialResponse::Get {
+ token,
+ cache: CacheControl::Session,
+ operation_independent: true,
+ })
+ }
+ Action::Login(options) => {
+ // Automatically remove `cargo login` from an inputted token to
+ // allow direct pastes from `registry.host()`/me.
+ let new_token = cargo_credential::read_token(options, registry)?
+ .map(|line| line.replace("cargo login", "").trim().to_string());
+
+ crates_io::check_token(new_token.as_ref().expose()).map_err(Box::new)?;
+ config::save_credentials(
+ self.config,
+ Some(RegistryCredentialConfig::Token(new_token)),
+ &sid,
+ )?;
+ let _ = self.config.shell().status(
+ "Login",
+ format!("token for `{}` saved", sid.display_registry_name()),
+ );
+ Ok(CredentialResponse::Login)
+ }
+ Action::Logout => {
+ if previous_token.is_none() {
+ return Err(Error::NotFound);
+ }
+ let reg_name = sid.display_registry_name();
+ config::save_credentials(self.config, None, &sid)?;
+ let _ = self.config.shell().status(
+ "Logout",
+ format!("token for `{reg_name}` has been removed from local storage"),
+ );
+ let location = if sid.is_crates_io() {
+ "<https://crates.io/me>".to_string()
+ } else {
+ // The URL for the source requires network access to load the config.
+ // That could be a fairly heavy operation to perform just to provide a
+ // help message, so for now this just provides some generic text.
+ // Perhaps in the future this could have an API to fetch the config if
+ // it is cached, but avoid network access otherwise?
+ format!("the `{reg_name}` website")
+ };
+ eprintln!(
+ "note: This does not revoke the token on the registry server.\n \
+ If you need to revoke the token, visit {location} and follow the instructions there."
+ );
+ Ok(CredentialResponse::Logout)
+ }
+ _ => Err(Error::OperationNotSupported),
+ }
+ }
+}
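
As a small, standalone illustration of the paste cleanup in the Login arm above (not Cargo's code): a token copied together with the `cargo login` prefix from a registry's token page is reduced to just the token.

```rust
fn clean_pasted_token(line: &str) -> String {
    // Mirrors the `replace` + `trim` above: drop a copied "cargo login"
    // prefix and any surrounding whitespace.
    line.replace("cargo login", "").trim().to_string()
}

fn main() {
    assert_eq!(clean_pasted_token("cargo login abc123secret"), "abc123secret");
    assert_eq!(clean_pasted_token("  abc123secret \n"), "abc123secret");
}
```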
diff --git a/src/tools/cargo/src/cargo/util/diagnostic_server.rs b/src/tools/cargo/src/cargo/util/diagnostic_server.rs
index 36215735b..f8eeabfc2 100644
--- a/src/tools/cargo/src/cargo/util/diagnostic_server.rs
+++ b/src/tools/cargo/src/cargo/util/diagnostic_server.rs
@@ -11,8 +11,8 @@ use std::thread::{self, JoinHandle};
use anyhow::{Context, Error};
use cargo_util::ProcessBuilder;
-use log::warn;
use serde::{Deserialize, Serialize};
+use tracing::warn;
use crate::core::Edition;
use crate::util::errors::CargoResult;
diff --git a/src/tools/cargo/src/cargo/util/errors.rs b/src/tools/cargo/src/cargo/util/errors.rs
index 5c7eebcdb..9589e1ae3 100644
--- a/src/tools/cargo/src/cargo/util/errors.rs
+++ b/src/tools/cargo/src/cargo/util/errors.rs
@@ -83,8 +83,21 @@ impl HttpNotSuccessful {
}
write!(result, ", got {}\n", self.code).unwrap();
if show_headers {
- if !self.headers.is_empty() {
- write!(result, "debug headers:\n{}\n", self.headers.join("\n")).unwrap();
+ let headers: Vec<_> = self
+ .headers
+ .iter()
+ .filter(|header| {
+ let Some((name, _)) = header.split_once(":") else {
+ return false;
+ };
+ DEBUG_HEADERS.contains(&name.to_ascii_lowercase().trim())
+ })
+ .collect();
+ if !headers.is_empty() {
+ writeln!(result, "debug headers:").unwrap();
+ for header in headers {
+ writeln!(result, "{header}").unwrap();
+ }
}
}
write!(result, "body:\n{body}").unwrap();
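
A standalone sketch of the filtering introduced above: keep only headers whose lower-cased, trimmed name appears in an allow-list. The `DEBUG_HEADERS` values below are illustrative stand-ins for the constant Cargo actually uses.

```rust
const DEBUG_HEADERS: &[&str] = &["x-cache", "x-served-by"]; // illustrative values

fn debug_header_lines(headers: &[String]) -> Vec<&String> {
    headers
        .iter()
        .filter(|header| {
            // Skip lines without a `name: value` shape, then match the name
            // case-insensitively against the allow-list.
            let Some((name, _)) = header.split_once(':') else {
                return false;
            };
            DEBUG_HEADERS.contains(&name.to_ascii_lowercase().trim())
        })
        .collect()
}

fn main() {
    let headers = vec![
        "X-Cache: HIT".to_string(),
        "Set-Cookie: sessionid=secret".to_string(),
    ];
    // Only the allow-listed header survives; cookies are never shown.
    assert_eq!(debug_header_lines(&headers), vec![&headers[0]]);
}
```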
diff --git a/src/tools/cargo/src/cargo/util/job.rs b/src/tools/cargo/src/cargo/util/job.rs
index f2bcf94a2..1d68fc433 100644
--- a/src/tools/cargo/src/cargo/util/job.rs
+++ b/src/tools/cargo/src/cargo/util/job.rs
@@ -49,7 +49,7 @@ mod imp {
use std::ptr;
use std::ptr::addr_of;
- use log::info;
+ use tracing::info;
use windows_sys::Win32::Foundation::CloseHandle;
use windows_sys::Win32::Foundation::HANDLE;
diff --git a/src/tools/cargo/src/cargo/util/mod.rs b/src/tools/cargo/src/cargo/util/mod.rs
index df8dcb0ac..26e97e2d2 100644
--- a/src/tools/cargo/src/cargo/util/mod.rs
+++ b/src/tools/cargo/src/cargo/util/mod.rs
@@ -36,6 +36,7 @@ pub mod command_prelude;
pub mod config;
mod counter;
pub mod cpu;
+pub mod credential;
mod dependency_queue;
pub mod diagnostic_server;
pub mod edit_distance;
diff --git a/src/tools/cargo/src/cargo/util/network/http.rs b/src/tools/cargo/src/cargo/util/network/http.rs
index f077ce2b6..73880f60e 100644
--- a/src/tools/cargo/src/cargo/util/network/http.rs
+++ b/src/tools/cargo/src/cargo/util/network/http.rs
@@ -8,8 +8,8 @@ use curl::easy::Easy;
use curl::easy::InfoType;
use curl::easy::SslOpt;
use curl::easy::SslVersion;
-use log::log;
-use log::Level;
+use tracing::debug;
+use tracing::trace;
use crate::util::config::SslVersionConfig;
use crate::util::config::SslVersionConfigRange;
@@ -135,18 +135,25 @@ pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult<
if let Some(true) = http.debug {
handle.verbose(true)?;
- log::debug!("{:#?}", curl::Version::get());
+ tracing::debug!("{:#?}", curl::Version::get());
handle.debug_function(|kind, data| {
+ enum LogLevel {
+ Debug,
+ Trace,
+ }
+ use LogLevel::*;
let (prefix, level) = match kind {
- InfoType::Text => ("*", Level::Debug),
- InfoType::HeaderIn => ("<", Level::Debug),
- InfoType::HeaderOut => (">", Level::Debug),
- InfoType::DataIn => ("{", Level::Trace),
- InfoType::DataOut => ("}", Level::Trace),
+ InfoType::Text => ("*", Debug),
+ InfoType::HeaderIn => ("<", Debug),
+ InfoType::HeaderOut => (">", Debug),
+ InfoType::DataIn => ("{", Trace),
+ InfoType::DataOut => ("}", Trace),
InfoType::SslDataIn | InfoType::SslDataOut => return,
_ => return,
};
let starts_with_ignore_case = |line: &str, text: &str| -> bool {
+ let line = line.as_bytes();
+ let text = text.as_bytes();
line[..line.len().min(text.len())].eq_ignore_ascii_case(text)
};
match str::from_utf8(data) {
@@ -159,16 +166,18 @@ pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult<
} else if starts_with_ignore_case(line, "set-cookie") {
line = "set-cookie: [REDACTED]";
}
- log!(level, "http-debug: {} {}", prefix, line);
+ match level {
+ Debug => debug!("http-debug: {prefix} {line}"),
+ Trace => trace!("http-debug: {prefix} {line}"),
+ }
}
}
Err(_) => {
- log!(
- level,
- "http-debug: {} ({} bytes of data)",
- prefix,
- data.len()
- );
+ let len = data.len();
+ match level {
+ Debug => debug!("http-debug: {prefix} ({len} bytes of data)"),
+ Trace => trace!("http-debug: {prefix} ({len} bytes of data)"),
+ }
}
}
})?;
diff --git a/src/tools/cargo/src/cargo/util/network/mod.rs b/src/tools/cargo/src/cargo/util/network/mod.rs
index b078fa352..5db594945 100644
--- a/src/tools/cargo/src/cargo/util/network/mod.rs
+++ b/src/tools/cargo/src/cargo/util/network/mod.rs
@@ -29,7 +29,7 @@ macro_rules! try_old_curl {
let result = $e;
if cfg!(target_os = "macos") {
if let Err(e) = result {
- ::log::warn!("ignoring libcurl {} error: {}", $msg, e);
+ ::tracing::warn!("ignoring libcurl {} error: {}", $msg, e);
}
} else {
use ::anyhow::Context;
diff --git a/src/tools/cargo/src/cargo/util/network/sleep.rs b/src/tools/cargo/src/cargo/util/network/sleep.rs
index d4105065e..fab53263b 100644
--- a/src/tools/cargo/src/cargo/util/network/sleep.rs
+++ b/src/tools/cargo/src/cargo/util/network/sleep.rs
@@ -68,7 +68,7 @@ impl<T> SleepTracker<T> {
let now = Instant::now();
let mut result = Vec::new();
while let Some(next) = self.heap.peek() {
- log::debug!("ERIC: now={now:?} next={:?}", next.wakeup);
+ tracing::debug!("ERIC: now={now:?} next={:?}", next.wakeup);
if next.wakeup < now {
result.push(self.heap.pop().unwrap().data);
} else {
diff --git a/src/tools/cargo/src/cargo/util/restricted_names.rs b/src/tools/cargo/src/cargo/util/restricted_names.rs
index be1811a88..2c3eaa9e1 100644
--- a/src/tools/cargo/src/cargo/util/restricted_names.rs
+++ b/src/tools/cargo/src/cargo/util/restricted_names.rs
@@ -87,23 +87,22 @@ pub fn validate_package_name(name: &str, what: &str, help: &str) -> CargoResult<
pub fn sanitize_package_name(name: &str, placeholder: char) -> String {
let mut slug = String::new();
let mut chars = name.chars();
- if let Some(ch) = chars.next() {
- if ch.is_digit(10) {
- slug.push(placeholder);
- slug.push(ch);
- } else if unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_' {
+ while let Some(ch) = chars.next() {
+ if (unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_') && !ch.is_digit(10) {
slug.push(ch);
- } else {
- slug.push(placeholder);
+ break;
}
}
- for ch in chars {
+ while let Some(ch) = chars.next() {
if unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-' {
slug.push(ch);
} else {
slug.push(placeholder);
}
}
+ if slug.is_empty() {
+ slug.push_str("package");
+ }
slug
}
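
The reworked loops above skip leading characters that cannot start an identifier (including digits), replace later invalid characters with the placeholder, and fall back to `package` when nothing usable remains. A sketch of the expected behaviour, written as a test that assumes `sanitize_package_name` is in scope:

```rust
#[test]
fn sanitize_package_name_examples() {
    // A leading digit is dropped rather than replaced.
    assert_eq!(sanitize_package_name("1two-three", '_'), "two-three");
    // Invalid characters after the first kept one become the placeholder.
    assert_eq!(sanitize_package_name("hello world", '_'), "hello_world");
    // Nothing usable is left, so the fixed fallback name is used.
    assert_eq!(sanitize_package_name("2018", '_'), "package");
}
```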
diff --git a/src/tools/cargo/src/cargo/util/rustc.rs b/src/tools/cargo/src/cargo/util/rustc.rs
index 3f1da64d4..238145af6 100644
--- a/src/tools/cargo/src/cargo/util/rustc.rs
+++ b/src/tools/cargo/src/cargo/util/rustc.rs
@@ -6,8 +6,8 @@ use std::sync::Mutex;
use anyhow::Context as _;
use cargo_util::{paths, ProcessBuilder, ProcessError};
-use log::{debug, info, warn};
use serde::{Deserialize, Serialize};
+use tracing::{debug, info, warn};
use crate::util::interning::InternedString;
use crate::util::{profile, CargoResult, Config, StableHasher};
diff --git a/src/tools/cargo/src/cargo/util/toml/embedded.rs b/src/tools/cargo/src/cargo/util/toml/embedded.rs
index 8e41010b4..395430c1b 100644
--- a/src/tools/cargo/src/cargo/util/toml/embedded.rs
+++ b/src/tools/cargo/src/cargo/util/toml/embedded.rs
@@ -18,7 +18,7 @@ pub fn expand_manifest(
let comment = match extract_comment(content) {
Ok(comment) => Some(comment),
Err(err) => {
- log::trace!("failed to extract doc comment: {err}");
+ tracing::trace!("failed to extract doc comment: {err}");
None
}
}
@@ -26,7 +26,7 @@ pub fn expand_manifest(
let manifest = match extract_manifest(&comment)? {
Some(manifest) => Some(manifest),
None => {
- log::trace!("failed to extract manifest");
+ tracing::trace!("failed to extract manifest");
None
}
}
@@ -84,7 +84,7 @@ fn expand_manifest_(
.or_insert_with(|| toml::Value::String(DEFAULT_VERSION.to_owned()));
package.entry("edition".to_owned()).or_insert_with(|| {
let _ = config.shell().warn(format_args!(
- "`package.edition` is unspecifiead, defaulting to `{}`",
+ "`package.edition` is unspecified, defaulting to `{}`",
DEFAULT_EDITION
));
toml::Value::String(DEFAULT_EDITION.to_string())
@@ -207,7 +207,11 @@ impl DocFragment {
let syn::Meta::NameValue(nv) = &attr.meta else {
anyhow::bail!("unsupported attr meta for {:?}", attr.meta.path())
};
- let syn::Expr::Lit(syn::ExprLit { lit: syn::Lit::Str(lit), .. }) = &nv.value else {
+ let syn::Expr::Lit(syn::ExprLit {
+ lit: syn::Lit::Str(lit),
+ ..
+ }) = &nv.value
+ else {
anyhow::bail!("only string literals are supported")
};
Ok(Self {
@@ -373,16 +377,21 @@ fn unindent_doc_fragments(docs: &mut [DocFragment]) {
let Some(min_indent) = docs
.iter()
.map(|fragment| {
- fragment.doc.as_str().lines().fold(usize::MAX, |min_indent, line| {
- if line.chars().all(|c| c.is_whitespace()) {
- min_indent
- } else {
- // Compare against either space or tab, ignoring whether they are
- // mixed or not.
- let whitespace = line.chars().take_while(|c| *c == ' ' || *c == '\t').count();
- min_indent.min(whitespace)
- }
- })
+ fragment
+ .doc
+ .as_str()
+ .lines()
+ .fold(usize::MAX, |min_indent, line| {
+ if line.chars().all(|c| c.is_whitespace()) {
+ min_indent
+ } else {
+ // Compare against either space or tab, ignoring whether they are
+ // mixed or not.
+ let whitespace =
+ line.chars().take_while(|c| *c == ' ' || *c == '\t').count();
+ min_indent.min(whitespace)
+ }
+ })
})
.min()
else {
diff --git a/src/tools/cargo/src/cargo/util/toml/mod.rs b/src/tools/cargo/src/cargo/util/toml/mod.rs
index 2202f6b3b..963c4afaa 100644
--- a/src/tools/cargo/src/cargo/util/toml/mod.rs
+++ b/src/tools/cargo/src/cargo/util/toml/mod.rs
@@ -11,12 +11,12 @@ use cargo_platform::Platform;
use cargo_util::paths;
use itertools::Itertools;
use lazycell::LazyCell;
-use log::{debug, trace};
use semver::{self, VersionReq};
use serde::de::IntoDeserializer as _;
use serde::de::{self, Unexpected};
use serde::ser;
use serde::{Deserialize, Serialize};
+use tracing::{debug, trace};
use url::Url;
use crate::core::compiler::{CompileKind, CompileTarget};
@@ -2835,7 +2835,9 @@ fn parse_unstable_lints<T: Deserialize<'static>>(
config: &Config,
warnings: &mut Vec<String>,
) -> CargoResult<Option<T>> {
- let Some(lints) = lints else { return Ok(None); };
+ let Some(lints) = lints else {
+ return Ok(None);
+ };
if !config.cli_unstable().lints {
warn_for_lint_feature(config, warnings);
@@ -2878,7 +2880,9 @@ switch to nightly channel you can pass
}
fn verify_lints(lints: Option<TomlLints>) -> CargoResult<Option<TomlLints>> {
- let Some(lints) = lints else { return Ok(None); };
+ let Some(lints) = lints else {
+ return Ok(None);
+ };
for (tool, lints) in &lints {
let supported = ["rust", "clippy", "rustdoc"];
diff --git a/src/tools/cargo/src/cargo/util/toml_mut/dependency.rs b/src/tools/cargo/src/cargo/util/toml_mut/dependency.rs
index 1b24833c1..2f39b7ab4 100644
--- a/src/tools/cargo/src/cargo/util/toml_mut/dependency.rs
+++ b/src/tools/cargo/src/cargo/util/toml_mut/dependency.rs
@@ -881,7 +881,11 @@ impl GitSource {
impl std::fmt::Display for GitSource {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let git_ref = self.git_ref();
- if let Some(pretty_ref) = git_ref.pretty_ref() {
+
+ // TODO(-Znext-lockfile-bump): set it to true when stabilizing
+ // lockfile v4, because we want Source ID serialization to be
+ // consistent with lockfile.
+ if let Some(pretty_ref) = git_ref.pretty_ref(false) {
write!(f, "{}?{}", self.git, pretty_ref)
} else {
write!(f, "{}", self.git)
diff --git a/src/tools/cargo/src/doc/contrib/src/implementation/debugging.md b/src/tools/cargo/src/doc/contrib/src/implementation/debugging.md
index e148d72c3..03940e2ff 100644
--- a/src/tools/cargo/src/doc/contrib/src/implementation/debugging.md
+++ b/src/tools/cargo/src/doc/contrib/src/implementation/debugging.md
@@ -2,11 +2,11 @@
## Logging
-Cargo uses the [`env_logger`] crate to display debug log messages. The
-`CARGO_LOG` environment variable can be set to enable debug logging, with a
-value such as `trace`, `debug`, or `warn`. It also supports filtering for
-specific modules. Feel free to use the standard [`log`] macros to help with
-diagnosing problems.
+Cargo uses the [`tracing`] crate to display debug log messages.
+The `CARGO_LOG` environment variable can be set to enable debug logging, with a value such as `trace`, `debug`, or `warn`.
+It also supports filtering for specific modules with comma-separated [directives].
+Feel free to use [shorthand macros] to help with diagnosing problems.
+We're looking forward to making Cargo's logging mechanism more structured!
```sh
# Outputs all logs with levels debug and higher
@@ -22,5 +22,6 @@ CARGO_HTTP_DEBUG=true CARGO_LOG=cargo::ops::registry=debug cargo fetch
CARGO_LOG=cargo::core::compiler::fingerprint=trace cargo build
```
-[`env_logger`]: https://docs.rs/env_logger
-[`log`]: https://docs.rs/log
+[`tracing`]: https://docs.rs/tracing
+[directives]: https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives
+[shorthand macros]: https://docs.rs/tracing/index.html#shorthand-macros
diff --git a/src/tools/cargo/src/doc/contrib/src/process/release.md b/src/tools/cargo/src/doc/contrib/src/process/release.md
index f0de267c8..169d63ed8 100644
--- a/src/tools/cargo/src/doc/contrib/src/process/release.md
+++ b/src/tools/cargo/src/doc/contrib/src/process/release.md
@@ -127,11 +127,38 @@ module].
## crates.io publishing
-Cargo's library is published to [crates.io] as part of the stable release
-process. This is handled by the [Release team] as part of their process. There
-is a [`publish.py` script] that in theory should help with this process. The
-test and build tool crates aren't published.
-
+Cargo's library and its related dependencies (like `cargo-util`) are published
+to [crates.io] as part of the 6-week stable release process by the [Release
+team]. There is a [`publish.py` script] that is used by the Release team's
+automation scripts (see <https://github.com/rust-lang/simpleinfra/>) to handle
+determining which packages to publish. The test and build tool crates aren't
+published. This runs on the specific git commit associated with the cargo
+submodule in the `stable` branch in `rust-lang/rust` at the time of release.
+
+In very rare cases, the Cargo team may decide to manually publish a new
+release to [crates.io]. For example, this may be necessary if there is a
+problem with the current version that only affects API users, and does not
+affect the `cargo` binary shipped in the stable release. In this situation,
+PRs should be merged to the associated stable release branch in the cargo repo
+(like `rust-1.70.0`) that fix the issue and bump the patch version of the
+affected package. Then someone with permissions (currently a subset of the
+Cargo team, or the Release team) should publish it manually using `cargo
+publish`.
+
+Some packages are not published automatically because they are not part of the
+Rust release train. These currently include all of the [`credential`] packages
+and the [`home`] package. These are published manually on an as-needed or
+as-requested basis by whoever has permissions (currently [@ehuss] or the
+Release/Infra team).
+
+In the future, these manual publishing options should be integrated with
+GitHub Actions so that any team member can trigger them. That will likely
+involve getting Infra to create scoped tokens that can be added as GitHub
+Secrets, and setting up GitHub Actions workflows with the appropriate
+permissions which can be manually triggered to launch a release.
+
+[`home`]: https://github.com/rust-lang/cargo/tree/master/crates/home
+[`credential`]: https://github.com/rust-lang/cargo/tree/master/credential
[`publish.py` script]: https://github.com/rust-lang/cargo/blob/master/publish.py
## Beta backports
diff --git a/src/tools/cargo/src/doc/man/cargo-metadata.md b/src/tools/cargo/src/doc/man/cargo-metadata.md
index 0027a20b1..8efb29a97 100644
--- a/src/tools/cargo/src/doc/man/cargo-metadata.md
+++ b/src/tools/cargo/src/doc/man/cargo-metadata.md
@@ -13,15 +13,34 @@ cargo-metadata --- Machine-readable metadata about the current package
Output JSON to stdout containing information about the workspace members and
resolved dependencies of the current package.
-It is recommended to include the `--format-version` flag to future-proof
-your code to ensure the output is in the format you are expecting.
+The format of the output is subject to change in future versions of Cargo. It
+is recommended to include the `--format-version` flag to future-proof your code
+to ensure the output is in the format you are expecting. For more on the
+expectations, see ["Compatibility"](#compatibility).
See the [cargo_metadata crate](https://crates.io/crates/cargo_metadata)
for a Rust API for reading the metadata.
## OUTPUT FORMAT
-The output has the following format:
+### Compatibility
+
+Within the same output format version, compatibility is maintained, except in
+some scenarios. The following is a non-exhaustive list of changes that are not
+considered incompatible:
+
+* **Adding new fields** — New fields will be added when needed. Reserving this
+ helps Cargo evolve without bumping the format version too often.
+* **Adding new values for enum-like fields** — Same as adding new fields. It
+ keeps metadata evolving without stagnation.
+* **Changing opaque representations** — The inner representations of some
+ fields are implementation details. For example, fields related to "Package ID"
+ or "Source ID" are treated as opaque identifiers to differentiate packages or
+ sources. Consumers shouldn't rely on those representations unless specified.
+
+### JSON format
+
+The JSON output has the following format:
```javascript
{
@@ -34,7 +53,9 @@ The output has the following format:
"name": "my-package",
/* The version of the package. */
"version": "0.1.0",
- /* The Package ID, a unique identifier for referring to the package. */
+ /* The Package ID, an opaque and unique identifier for referring to the
+ package. See "Compatibility" above for the stability guarantee.
+ */
"id": "my-package 0.1.0 (path+file:///path/to/my-package)",
/* The license value from the manifest, or null. */
"license": "MIT/Apache-2.0",
@@ -42,14 +63,25 @@ The output has the following format:
"license_file": "LICENSE",
/* The description value from the manifest, or null. */
"description": "Package description.",
- /* The source ID of the package. This represents where
- a package is retrieved from.
+ /* The source ID of the package, an "opaque" identifier representing
+ where a package is retrieved from. See "Compatibility" above for
+ the stability guarantee.
+
This is null for path dependencies and workspace members.
+
For other dependencies, it is a string with the format:
- "registry+URL" for registry-based dependencies.
Example: "registry+https://github.com/rust-lang/crates.io-index"
- "git+URL" for git-based dependencies.
Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c"
+ - "sparse+URL" for dependencies from a sparse registry
+ Example: "sparse+https://my-sparse-registry.org"
+
+ The value after the `+` is not explicitly defined, and may change
+ between versions of Cargo and may not directly correlate to other
+ things, such as registry definitions in a config file. New source
+ kinds may be added in the future which will have different `+`
+ prefixed identifiers.
*/
"source": null,
/* Array of dependencies declared in the package's manifest. */
diff --git a/src/tools/cargo/src/doc/man/cargo-test.md b/src/tools/cargo/src/doc/man/cargo-test.md
index 75bf72b30..cba98b20d 100644
--- a/src/tools/cargo/src/doc/man/cargo-test.md
+++ b/src/tools/cargo/src/doc/man/cargo-test.md
@@ -186,7 +186,6 @@ includes an option to control the number of threads used:
{{#options}}
{{> options-jobs }}
-{{> options-keep-going }}
{{> options-future-incompat }}
{{/options}}
diff --git a/src/tools/cargo/src/doc/man/cargo-yank.md b/src/tools/cargo/src/doc/man/cargo-yank.md
index 946e98c85..2d7c117bf 100644
--- a/src/tools/cargo/src/doc/man/cargo-yank.md
+++ b/src/tools/cargo/src/doc/man/cargo-yank.md
@@ -46,7 +46,7 @@ requirements, following a given release being yanked:
|-------------------------------------|-----------------------------------------|------------------|------------------|
| `1.5.0` | Use either `1.5.1` or `1.5.2` | **Return Error** | Use `2.0.0` |
| `1.5.1` | Use either `1.5.0` or `1.5.2` | Use `1.5.0` | Use `2.0.0` |
-| `2.0.0` | Use either `1.5.0`, `1.5.1` or `0.22.2` | Use `1.5.0` | **Return Error** |
+| `2.0.0` | Use either `1.5.0`, `1.5.1` or `1.5.2` | Use `1.5.0` | **Return Error** |
### When to yank
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-metadata.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-metadata.txt
index d9d48fd9a..3d37f6bb8 100644
--- a/src/tools/cargo/src/doc/man/generated_txt/cargo-metadata.txt
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-metadata.txt
@@ -10,14 +10,34 @@ DESCRIPTION
Output JSON to stdout containing information about the workspace members
and resolved dependencies of the current package.
- It is recommended to include the --format-version flag to future-proof
- your code to ensure the output is in the format you are expecting.
+   The format of the output is subject to change in future versions of
+ Cargo. It is recommended to include the --format-version flag to
+ future-proof your code to ensure the output is in the format you are
+ expecting. For more on the expectations, see “Compatibility”.
See the cargo_metadata crate <https://crates.io/crates/cargo_metadata>
for a Rust API for reading the metadata.
OUTPUT FORMAT
- The output has the following format:
+ Compatibility
+   Within the same output format version, compatibility is maintained, except
+   in some scenarios. The following is a non-exhaustive list of changes that
+   are not considered incompatible:
+
+ o Adding new fields — New fields will be added when needed. Reserving
+ this helps Cargo evolve without bumping the format version too often.
+
+ o Adding new values for enum-like fields — Same as adding new fields.
+ It keeps metadata evolving without stagnation.
+
+ o Changing opaque representations — The inner representations of some
+ fields are implementation details. For example, fields related to
+ “Package ID” or “Source ID” are treated as opaque identifiers
+ to differentiate packages or sources. Consumers shouldn’t rely on
+ those representations unless specified.
+
+ JSON format
+ The JSON output has the following format:
{
/* Array of all packages in the workspace.
@@ -29,7 +49,9 @@ OUTPUT FORMAT
"name": "my-package",
/* The version of the package. */
"version": "0.1.0",
- /* The Package ID, a unique identifier for referring to the package. */
+ /* The Package ID, an opaque and unique identifier for referring to the
+ package. See "Compatibility" above for the stability guarantee.
+ */
"id": "my-package 0.1.0 (path+file:///path/to/my-package)",
/* The license value from the manifest, or null. */
"license": "MIT/Apache-2.0",
@@ -37,14 +59,25 @@ OUTPUT FORMAT
"license_file": "LICENSE",
/* The description value from the manifest, or null. */
"description": "Package description.",
- /* The source ID of the package. This represents where
- a package is retrieved from.
+ /* The source ID of the package, an "opaque" identifier representing
+ where a package is retrieved from. See "Compatibility" above for
+ the stability guarantee.
+
This is null for path dependencies and workspace members.
+
For other dependencies, it is a string with the format:
- "registry+URL" for registry-based dependencies.
Example: "registry+https://github.com/rust-lang/crates.io-index"
- "git+URL" for git-based dependencies.
Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c"
+ - "sparse+URL" for dependencies from a sparse registry
+ Example: "sparse+https://my-sparse-registry.org"
+
+ The value after the `+` is not explicitly defined, and may change
+ between versions of Cargo and may not directly correlate to other
+ things, such as registry definitions in a config file. New source
+ kinds may be added in the future which will have different `+`
+ prefixed identifiers.
*/
"source": null,
/* Array of dependencies declared in the package's manifest. */
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-test.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-test.txt
index 7955b0e3d..dc32bdbf7 100644
--- a/src/tools/cargo/src/doc/man/generated_txt/cargo-test.txt
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-test.txt
@@ -442,11 +442,6 @@ OPTIONS
If a string default is provided, it sets the value back to defaults.
Should not be 0.
- --keep-going
- Build as many crates in the dependency graph as possible, rather
- than aborting the build on the first one that fails to build.
- Unstable, requires -Zunstable-options.
-
--future-incompat-report
Displays a future-incompat report for any future-incompatible
warnings produced during execution of this command
diff --git a/src/tools/cargo/src/doc/man/generated_txt/cargo-yank.txt b/src/tools/cargo/src/doc/man/generated_txt/cargo-yank.txt
index 784e71a1a..ad966ad26 100644
--- a/src/tools/cargo/src/doc/man/generated_txt/cargo-yank.txt
+++ b/src/tools/cargo/src/doc/man/generated_txt/cargo-yank.txt
@@ -50,7 +50,7 @@ DESCRIPTION
| | 1.5.2 | 1.5.0 | 2.0.0 |
+------------------------+----------------------+----------+----------+
| 2.0.0 | Use either 1.5.0, | Use | Return |
- | | 1.5.1 or 0.22.2 | 1.5.0 | Error |
+ | | 1.5.1 or 1.5.2 | 1.5.0 | Error |
+------------------------+----------------------+----------+----------+
When to yank
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-metadata.md b/src/tools/cargo/src/doc/src/commands/cargo-metadata.md
index e9aeac7df..5be6992d7 100644
--- a/src/tools/cargo/src/doc/src/commands/cargo-metadata.md
+++ b/src/tools/cargo/src/doc/src/commands/cargo-metadata.md
@@ -13,15 +13,34 @@ cargo-metadata --- Machine-readable metadata about the current package
Output JSON to stdout containing information about the workspace members and
resolved dependencies of the current package.
-It is recommended to include the `--format-version` flag to future-proof
-your code to ensure the output is in the format you are expecting.
+The format of the output is subject to change in future versions of Cargo. It
+is recommended to include the `--format-version` flag to future-proof your code
+to ensure the output is in the format you are expecting. For more on the
+expectations, see ["Compatibility"](#compatibility).
See the [cargo_metadata crate](https://crates.io/crates/cargo_metadata)
for a Rust API for reading the metadata.
## OUTPUT FORMAT
-The output has the following format:
+### Compatibility
+
+Within the same output format version, compatibility is maintained, except in
+some scenarios. The following is a non-exhaustive list of changes that are not
+considered incompatible:
+
+* **Adding new fields** — New fields will be added when needed. Reserving this
+ helps Cargo evolve without bumping the format version too often.
+* **Adding new values for enum-like fields** — Same as adding new fields. It
+ keeps metadata evolving without stagnation.
+* **Changing opaque representations** — The inner representations of some
+ fields are implementation details. For example, fields related to "Package ID"
+ or "Source ID" are treated as opaque identifiers to differentiate packages or
+ sources. Consumers shouldn't rely on those representations unless specified.
+
+### JSON format
+
+The JSON output has the following format:
```javascript
{
@@ -34,7 +53,9 @@ The output has the following format:
"name": "my-package",
/* The version of the package. */
"version": "0.1.0",
- /* The Package ID, a unique identifier for referring to the package. */
+ /* The Package ID, an opaque and unique identifier for referring to the
+ package. See "Compatibility" above for the stability guarantee.
+ */
"id": "my-package 0.1.0 (path+file:///path/to/my-package)",
/* The license value from the manifest, or null. */
"license": "MIT/Apache-2.0",
@@ -42,14 +63,25 @@ The output has the following format:
"license_file": "LICENSE",
/* The description value from the manifest, or null. */
"description": "Package description.",
- /* The source ID of the package. This represents where
- a package is retrieved from.
+ /* The source ID of the package, an "opaque" identifier representing
+ where a package is retrieved from. See "Compatibility" above for
+ the stability guarantee.
+
This is null for path dependencies and workspace members.
+
For other dependencies, it is a string with the format:
- "registry+URL" for registry-based dependencies.
Example: "registry+https://github.com/rust-lang/crates.io-index"
- "git+URL" for git-based dependencies.
Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c"
+ - "sparse+URL" for dependencies from a sparse registry
+ Example: "sparse+https://my-sparse-registry.org"
+
+ The value after the `+` is not explicitly defined, and may change
+ between versions of Cargo and may not directly correlate to other
+ things, such as registry definitions in a config file. New source
+ kinds may be added in the future which will have different `+`
+ prefixed identifiers.
*/
"source": null,
/* Array of dependencies declared in the package's manifest. */
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-test.md b/src/tools/cargo/src/doc/src/commands/cargo-test.md
index e38e9929e..bcf46d601 100644
--- a/src/tools/cargo/src/doc/src/commands/cargo-test.md
+++ b/src/tools/cargo/src/doc/src/commands/cargo-test.md
@@ -515,12 +515,6 @@ a string <code>default</code> is provided, it sets the value back to defaults.
Should not be 0.</dd>
-<dt class="option-term" id="option-cargo-test---keep-going"><a class="option-anchor" href="#option-cargo-test---keep-going"></a><code>--keep-going</code></dt>
-<dd class="option-desc">Build as many crates in the dependency graph as possible, rather than aborting
-the build on the first one that fails to build. Unstable, requires
-<code>-Zunstable-options</code>.</dd>
-
-
<dt class="option-term" id="option-cargo-test---future-incompat-report"><a class="option-anchor" href="#option-cargo-test---future-incompat-report"></a><code>--future-incompat-report</code></dt>
<dd class="option-desc">Displays a future-incompat report for any future-incompatible warnings
produced during execution of this command</p>
diff --git a/src/tools/cargo/src/doc/src/commands/cargo-yank.md b/src/tools/cargo/src/doc/src/commands/cargo-yank.md
index 7fac5bf47..c0be657bc 100644
--- a/src/tools/cargo/src/doc/src/commands/cargo-yank.md
+++ b/src/tools/cargo/src/doc/src/commands/cargo-yank.md
@@ -46,7 +46,7 @@ requirements, following a given release being yanked:
|-------------------------------------|-----------------------------------------|------------------|------------------|
| `1.5.0` | Use either `1.5.1` or `1.5.2` | **Return Error** | Use `2.0.0` |
| `1.5.1` | Use either `1.5.0` or `1.5.2` | Use `1.5.0` | Use `2.0.0` |
-| `2.0.0` | Use either `1.5.0`, `1.5.1` or `0.22.2` | Use `1.5.0` | **Return Error** |
+| `2.0.0` | Use either `1.5.0`, `1.5.1` or `1.5.2` | Use `1.5.0` | **Return Error** |
### When to yank
diff --git a/src/tools/cargo/src/doc/src/guide/cargo-toml-vs-cargo-lock.md b/src/tools/cargo/src/doc/src/guide/cargo-toml-vs-cargo-lock.md
index 9b0426684..84d697f66 100644
--- a/src/tools/cargo/src/doc/src/guide/cargo-toml-vs-cargo-lock.md
+++ b/src/tools/cargo/src/doc/src/guide/cargo-toml-vs-cargo-lock.md
@@ -35,7 +35,7 @@ regex = { git = "https://github.com/rust-lang/regex.git" }
This package has a single dependency, on the `regex` library. We’ve stated in
this case that we’re relying on a particular Git repository that lives on
GitHub. Since we haven’t specified any other information, Cargo assumes that
-we intend to use the latest commit on the `master` branch to build our package.
+we intend to use the latest commit on the default branch to build our package.
Sound good? Well, there’s one problem: If you build this package today, and
then you send a copy to me, and I build this package tomorrow, something bad
diff --git a/src/tools/cargo/src/doc/src/reference/build-scripts.md b/src/tools/cargo/src/doc/src/reference/build-scripts.md
index 68e8d404f..e7560812b 100644
--- a/src/tools/cargo/src/doc/src/reference/build-scripts.md
+++ b/src/tools/cargo/src/doc/src/reference/build-scripts.md
@@ -34,7 +34,7 @@ The sections below describe how build scripts work, and the [examples
chapter](build-script-examples.md) shows a variety of examples on how to write
scripts.
-> Note: The [`package.build` manifest key](manifest.md#package-build) can be
+> Note: The [`package.build` manifest key](manifest.md#the-build-field) can be
> used to change the name of the build script, or disable it entirely.
### Life Cycle of a Build Script
@@ -132,8 +132,7 @@ one detailed below.
scripts.
-<a id="rustc-link-arg"></a>
-#### `cargo:rustc-link-arg=FLAG`
+#### `cargo:rustc-link-arg=FLAG` {#rustc-link-arg}
The `rustc-link-arg` instruction tells Cargo to pass the [`-C link-arg=FLAG`
option][link-arg] to the compiler, but only when building supported targets
@@ -143,8 +142,7 @@ linker script.
[link-arg]: ../../rustc/codegen-options/index.md#link-arg
-<a id="rustc-link-arg-bin"></a>
-#### `cargo:rustc-link-arg-bin=BIN=FLAG`
+#### `cargo:rustc-link-arg-bin=BIN=FLAG` {#rustc-link-arg-bin}
The `rustc-link-arg-bin` instruction tells Cargo to pass the [`-C
link-arg=FLAG` option][link-arg] to the compiler, but only when building
@@ -152,8 +150,7 @@ the binary target with name `BIN`. Its usage is highly platform specific. It is
to set a linker script or other linker options.
-<a id="rustc-link-arg-bins"></a>
-#### `cargo:rustc-link-arg-bins=FLAG`
+#### `cargo:rustc-link-arg-bins=FLAG` {#rustc-link-arg-bins}
The `rustc-link-arg-bins` instruction tells Cargo to pass the [`-C
link-arg=FLAG` option][link-arg] to the compiler, but only when building a
@@ -161,8 +158,7 @@ binary target. Its usage is highly platform specific. It is useful
to set a linker script or other linker options.
-<a id="rustc-link-lib"></a>
-#### `cargo:rustc-link-lib=LIB`
+#### `cargo:rustc-link-lib=LIB` {#rustc-link-lib}
The `rustc-link-lib` instruction tells Cargo to link the given library using
the compiler's [`-l` flag][option-link]. This is typically used to link a
@@ -187,30 +183,26 @@ The optional `KIND` may be one of `dylib`, `static`, or `framework`. See the
[FFI]: ../../nomicon/ffi.md
-<a id="rustc-link-arg-tests"></a>
-#### `cargo:rustc-link-arg-tests=FLAG`
+#### `cargo:rustc-link-arg-tests=FLAG` {#rustc-link-arg-tests}
The `rustc-link-arg-tests` instruction tells Cargo to pass the [`-C
link-arg=FLAG` option][link-arg] to the compiler, but only when building a
tests target.
-<a id="rustc-link-arg-examples"></a>
-#### `cargo:rustc-link-arg-examples=FLAG`
+#### `cargo:rustc-link-arg-examples=FLAG` {#rustc-link-arg-examples}
The `rustc-link-arg-examples` instruction tells Cargo to pass the [`-C
link-arg=FLAG` option][link-arg] to the compiler, but only when building an examples
target.
-<a id="rustc-link-arg-benches"></a>
-#### `cargo:rustc-link-arg-benches=FLAG`
+#### `cargo:rustc-link-arg-benches=FLAG` {#rustc-link-arg-benches}
The `rustc-link-arg-benches` instruction tells Cargo to pass the [`-C
link-arg=FLAG` option][link-arg] to the compiler, but only when building a benchmark
target.
-<a id="rustc-link-search"></a>
-#### `cargo:rustc-link-search=[KIND=]PATH`
+#### `cargo:rustc-link-search=[KIND=]PATH` {#rustc-link-search}
The `rustc-link-search` instruction tells Cargo to pass the [`-L`
flag][option-search] to the compiler to add a directory to the library search
@@ -228,16 +220,14 @@ is fine).
[option-search]: ../../rustc/command-line-arguments.md#option-l-search-path
-<a id="rustc-flags"></a>
-#### `cargo:rustc-flags=FLAGS`
+#### `cargo:rustc-flags=FLAGS` {#rustc-flags}
The `rustc-flags` instruction tells Cargo to pass the given space-separated
flags to the compiler. This only allows the `-l` and `-L` flags, and is
equivalent to using [`rustc-link-lib`](#rustc-link-lib) and
[`rustc-link-search`](#rustc-link-search).
-<a id="rustc-cfg"></a>
-#### `cargo:rustc-cfg=KEY[="VALUE"]`
+#### `cargo:rustc-cfg=KEY[="VALUE"]` {#rustc-cfg}
The `rustc-cfg` instruction tells Cargo to pass the given value to the
[`--cfg` flag][option-cfg] to the compiler. This may be used for compile-time
@@ -258,8 +248,7 @@ identifier, the value should be a string.
[conditional compilation]: ../../reference/conditional-compilation.md
[option-cfg]: ../../rustc/command-line-arguments.md#option-cfg
-<a id="rustc-env"></a>
-#### `cargo:rustc-env=VAR=VALUE`
+#### `cargo:rustc-env=VAR=VALUE` {#rustc-env}
The `rustc-env` instruction tells Cargo to set the given environment variable
when compiling the package. The value can be then retrieved by the [`env!`
@@ -279,8 +268,7 @@ Cargo][env-cargo].
[env-macro]: ../../std/macro.env.html
[env-cargo]: environment-variables.md#environment-variables-cargo-sets-for-crates
-<a id="rustc-cdylib-link-arg"></a>
-#### `cargo:rustc-cdylib-link-arg=FLAG`
+#### `cargo:rustc-cdylib-link-arg=FLAG` {#rustc-cdylib-link-arg}
The `rustc-cdylib-link-arg` instruction tells Cargo to pass the [`-C
link-arg=FLAG` option][link-arg] to the compiler, but only when building a
@@ -288,8 +276,7 @@ link-arg=FLAG` option][link-arg] to the compiler, but only when building a
to set the shared library version or the runtime-path.
-<a id="cargo-warning"></a>
-#### `cargo:warning=MESSAGE`
+#### `cargo:warning=MESSAGE` {#cargo-warning}
The `warning` instruction tells Cargo to display a warning after the build
script has finished running. Warnings are only shown for `path` dependencies
@@ -334,8 +321,7 @@ FAQ](../faq.md#why-is-cargo-rebuilding-my-code).
[`exclude` and `include` fields]: manifest.md#the-exclude-and-include-fields
-<a id="rerun-if-changed"></a>
-#### `cargo:rerun-if-changed=PATH`
+#### `cargo:rerun-if-changed=PATH` {#rerun-if-changed}
The `rerun-if-changed` instruction tells Cargo to re-run the build script if
the file at the given path has changed. Currently, Cargo only uses the
@@ -354,15 +340,16 @@ automatically handles whether or not the script itself needs to be recompiled,
and of course the script will be re-run after it has been recompiled.
Otherwise, specifying `build.rs` is redundant and unnecessary.
-<a id="rerun-if-env-changed"></a>
-#### `cargo:rerun-if-env-changed=NAME`
+#### `cargo:rerun-if-env-changed=NAME` {#rerun-if-env-changed}
The `rerun-if-env-changed` instruction tells Cargo to re-run the build script
if the value of an environment variable of the given name has changed.
Note that the environment variables here are intended for global environment
-variables like `CC` and such, it is not necessary to use this for environment
-variables like `TARGET` that Cargo sets.
+variables like `CC` and such, it is not possible to use this for environment
+variables like `TARGET` that [Cargo sets for build scripts][build-env]. The
+environment variables in use are those received by `cargo` invocations, not
+those received by the executable of the build script.
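
For example, a build script that should only re-run when the global `CC` variable changes could use the instructions documented above (a minimal sketch):

```rust
// build.rs
fn main() {
    // Re-run this script only when the `CC` environment variable changes.
    println!("cargo:rerun-if-env-changed=CC");

    if let Ok(cc) = std::env::var("CC") {
        // `cargo:warning=` surfaces a message after the script finishes.
        println!("cargo:warning=building with CC={cc}");
    }
}
```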
### The `links` Manifest Key
diff --git a/src/tools/cargo/src/doc/src/reference/config.md b/src/tools/cargo/src/doc/src/reference/config.md
index 30053bb18..d1f2b04d3 100644
--- a/src/tools/cargo/src/doc/src/reference/config.md
+++ b/src/tools/cargo/src/doc/src/reference/config.md
@@ -132,6 +132,7 @@ panic = 'unwind' # The panic strategy.
incremental = true # Incremental compilation.
codegen-units = 16 # Number of code generation units.
rpath = false # Sets the rpath linking option.
+strip = "none" # Removes symbols or debuginfo.
[profile.<name>.build-override] # Overrides build-script settings.
# Same keys for a normal profile.
[profile.<name>.package.<name>] # Override profile for a package.
@@ -317,6 +318,12 @@ be specified with environment variables of the form
`CARGO_REGISTRIES_<name>_TOKEN` where `<name>` is the name of the registry in
all capital letters.
+> **Note:** Cargo also reads and writes credential files without the `.toml`
+> extension, such as `.cargo/credentials`. Support for the `.toml` extension
+> was added in version 1.39. In version 1.68, Cargo writes to the file with the
+> extension by default. However, for backward compatibility reasons, when both
+> files exist, Cargo will read and write the file without the extension.
+
### Configuration keys
This section documents all configuration keys. The description for keys with
@@ -908,6 +915,13 @@ See [panic](profiles.md#panic).
See [rpath](profiles.md#rpath).
+##### `profile.<name>.strip`
+* Type: string
+* default: See profile docs.
+* Environment: `CARGO_PROFILE_<name>_STRIP`
+
+See [strip](profiles.md#strip).
+
#### `[registries]`
diff --git a/src/tools/cargo/src/doc/src/reference/environment-variables.md b/src/tools/cargo/src/doc/src/reference/environment-variables.md
index 353742877..25881d138 100644
--- a/src/tools/cargo/src/doc/src/reference/environment-variables.md
+++ b/src/tools/cargo/src/doc/src/reference/environment-variables.md
@@ -9,7 +9,7 @@ with them:
You can override these environment variables to change Cargo's behavior on your
system:
-* `CARGO_LOG` --- Cargo uses the [`env_logger`] crate to display debug log messages.
+* `CARGO_LOG` --- Cargo uses the [`tracing`] crate to display debug log messages.
The `CARGO_LOG` environment variable can be set to enable debug logging,
with a value such as `trace`, `debug`, or `warn`.
Usually it is only used during debugging. For more details refer to the
@@ -123,6 +123,7 @@ In summary, the supported environment variables are:
* `CARGO_PROFILE_<name>_PANIC` --- The panic strategy to use, see [`profile.<name>.panic`].
* `CARGO_PROFILE_<name>_RPATH` --- The rpath linking option, see [`profile.<name>.rpath`].
* `CARGO_PROFILE_<name>_SPLIT_DEBUGINFO` --- Controls debug file output behavior, see [`profile.<name>.split-debuginfo`].
+* `CARGO_PROFILE_<name>_STRIP` --- Controls stripping of symbols and/or debuginfo, see [`profile.<name>.strip`].
* `CARGO_REGISTRIES_<name>_INDEX` --- URL of a registry index, see [`registries.<name>.index`].
* `CARGO_REGISTRIES_<name>_TOKEN` --- Authentication token of a registry, see [`registries.<name>.token`].
* `CARGO_REGISTRY_DEFAULT` --- Default registry for the `--registry` flag, see [`registry.default`].
@@ -185,6 +186,7 @@ In summary, the supported environment variables are:
[`profile.<name>.panic`]: config.md#profilenamepanic
[`profile.<name>.rpath`]: config.md#profilenamerpath
[`profile.<name>.split-debuginfo`]: config.md#profilenamesplit-debuginfo
+[`profile.<name>.strip`]: config.md#profilenamestrip
[`registries.<name>.index`]: config.md#registriesnameindex
[`registries.<name>.token`]: config.md#registriesnametoken
[`registry.default`]: config.md#registrydefault
@@ -387,7 +389,7 @@ let out_dir = env::var("OUT_DIR").unwrap();
the environment; scripts should use `CARGO_ENCODED_RUSTFLAGS` instead.
* `CARGO_PKG_<var>` --- The package information variables, with the same names and values as are [provided during crate building][variables set for crates].
-[`env_logger`]: https://docs.rs/env_logger
+[`tracing`]: https://docs.rs/tracing
[debug logging]: https://doc.crates.io/contrib/architecture/console.html#debug-logging
[unix-like platforms]: ../../reference/conditional-compilation.html#unix-and-windows
[windows-like platforms]: ../../reference/conditional-compilation.html#unix-and-windows
diff --git a/src/tools/cargo/src/doc/src/reference/external-tools.md b/src/tools/cargo/src/doc/src/reference/external-tools.md
index 7b5110cbe..b2f37ca0b 100644
--- a/src/tools/cargo/src/doc/src/reference/external-tools.md
+++ b/src/tools/cargo/src/doc/src/reference/external-tools.md
@@ -159,7 +159,8 @@ following structure:
"profile": {
/* The optimization level. */
"opt_level": "0",
- /* The debug level, an integer of 0, 1, or 2. If `null`, it implies
+ /* The debug level, an integer of 0, 1, or 2, or a string
+ "line-directives-only" or "line-tables-only". If `null`, it implies
rustc's default of 0.
*/
"debuginfo": 2,
diff --git a/src/tools/cargo/src/doc/src/reference/manifest.md b/src/tools/cargo/src/doc/src/reference/manifest.md
index 5a3d60ccc..5f9d29ff6 100644
--- a/src/tools/cargo/src/doc/src/reference/manifest.md
+++ b/src/tools/cargo/src/doc/src/reference/manifest.md
@@ -53,7 +53,6 @@ Every manifest file consists of the following sections:
* [`[profile]`](profiles.md) --- Compiler settings and optimizations.
* [`[workspace]`](workspaces.md) --- The workspace definition.
-<a id="package-metadata"></a>
### The `[package]` section
The first section in a `Cargo.toml` is `[package]`.
@@ -112,7 +111,6 @@ breaking change.
[Resolver]: resolver.md
[SemVer compatibility]: semver.md
-<a id="the-authors-field-optional"></a>
#### The `authors` field
The optional `authors` field lists in an array the people or organizations that are considered
@@ -135,7 +133,6 @@ user interface.
> field cannot be changed or removed in already-published versions of a
> package.
-<a id="the-edition-field-optional"></a>
#### The `edition` field
The `edition` key is an optional key that affects which [Rust Edition] your package
@@ -198,7 +195,6 @@ description = "A short description of my package"
> **Note**: [crates.io] requires the `description` to be set.
-<a id="the-documentation-field-optional"></a>
#### The `documentation` field
The `documentation` field specifies a URL to a website hosting the crate's
@@ -326,7 +322,6 @@ categories = ["command-line-utilities", "development-tools::cargo-plugins"]
> match one of the strings available at <https://crates.io/category_slugs>, and
> must match exactly.
-<a id="the-workspace--field-optional"></a>
#### The `workspace` field
The `workspace` field can be used to configure the workspace that this package
@@ -347,8 +342,6 @@ table defined. That is, a crate cannot both be a root crate in a workspace
For more information, see the [workspaces chapter](workspaces.md).
-<a id="package-build"></a>
-<a id="the-build-field-optional"></a>
#### The `build` field
The `build` field specifies a file in the package root which is a [build
@@ -368,7 +361,6 @@ The default is `"build.rs"`, which loads the script from a file named
specify a path to a different file or `build = false` to disable automatic
detection of the build script.
-<a id="the-links-field-optional"></a>
#### The `links` field
The `links` field specifies the name of a native library that is being linked
@@ -386,7 +378,6 @@ on Linux) may specify:
links = "git2"
```
-<a id="the-exclude-and-include-fields-optional"></a>
#### The `exclude` and `include` fields
The `exclude` and `include` fields can be used to explicitly specify which
@@ -476,7 +467,6 @@ if any of those files change.
[gitignore]: https://git-scm.com/docs/gitignore
-<a id="the-publish--field-optional"></a>
#### The `publish` field
The `publish` field can be used to prevent a package from being published to a
@@ -501,7 +491,6 @@ publish = ["some-registry-name"]
If publish array contains a single registry, `cargo publish` command will use
it when `--registry` flag is not specified.
-<a id="the-metadata-table-optional"></a>
#### The `metadata` table
Cargo by default will warn about unused keys in `Cargo.toml` to assist in
@@ -624,6 +613,17 @@ more detail.
"#the-patch-section": "overriding-dependencies.html#the-patch-section",
"#using-patch-with-multiple-versions": "overriding-dependencies.html#using-patch-with-multiple-versions",
"#the-replace-section": "overriding-dependencies.html#the-replace-section",
+ "#package-metadata": "manifest.html#the-package-section",
+ "#the-authors-field-optional": "manifest.html#the-authors-field",
+ "#the-edition-field-optional": "manifest.html#the-edition-field",
+ "#the-documentation-field-optional": "manifest.html#the-documentation-field",
+ "#the-workspace--field-optional": "manifest.html#the-workspace-field",
+ "#package-build": "manifest.html#the-build-field",
+ "#the-build-field-optional": "manifest.html#the-build-field",
+ "#the-links-field-optional": "manifest.html#the-links-field",
+ "#the-exclude-and-include-fields-optional": "manifest.html#the-exclude-and-include-fields",
+ "#the-publish--field-optional": "manifest.html#the-publish-field",
+ "#the-metadata-table-optional": "manifest.html#the-metadata-table",
};
var target = fragments[window.location.hash];
if (target) {
diff --git a/src/tools/cargo/src/doc/src/reference/overriding-dependencies.md b/src/tools/cargo/src/doc/src/reference/overriding-dependencies.md
index c8e8fbcb6..c04a7929d 100644
--- a/src/tools/cargo/src/doc/src/reference/overriding-dependencies.md
+++ b/src/tools/cargo/src/doc/src/reference/overriding-dependencies.md
@@ -349,9 +349,9 @@ crate, instead `[patch]` must be used in that situation. As a result usage of a
path override is typically isolated to quick bug fixes rather than larger
changes.
-Note: using a local configuration to override paths will only work for crates
-that have been published to [crates.io]. You cannot use this feature to tell
-Cargo how to find local unpublished crates.
+> **Note**: using a local configuration to override paths will only work for
+> crates that have been published to [crates.io]. You cannot use this feature
+> to tell Cargo how to find local unpublished crates.
[crates.io]: https://crates.io/
diff --git a/src/tools/cargo/src/doc/src/reference/profiles.md b/src/tools/cargo/src/doc/src/reference/profiles.md
index 124203d55..c094aa815 100644
--- a/src/tools/cargo/src/doc/src/reference/profiles.md
+++ b/src/tools/cargo/src/doc/src/reference/profiles.md
@@ -155,9 +155,10 @@ The valid options are:
#### lto
-The `lto` setting controls the [`-C lto` flag] which controls LLVM's [link
-time optimizations]. LTO can produce better optimized code, using
-whole-program analysis, at the cost of longer linking time.
+The `lto` setting controls `rustc`'s [`-C lto`], [`-C linker-plugin-lto`], and
+[`-C embed-bitcode`] options, which control LLVM's [link time optimizations].
+LTO can produce better optimized code, using whole-program analysis, at the cost
+of longer linking time.
The valid options are:
@@ -171,11 +172,15 @@ The valid options are:
similar to "fat".
* `"off"`: Disables LTO.
-See also the [`-C linker-plugin-lto`] `rustc` flag for cross-language LTO.
+See the [linker-plugin-lto chapter] if you are interested in cross-language LTO.
+This is not yet supported natively in Cargo, but can be performed via
+`RUSTFLAGS`.
-[`-C lto` flag]: ../../rustc/codegen-options/index.html#lto
+[`-C lto`]: ../../rustc/codegen-options/index.html#lto
[link time optimizations]: https://llvm.org/docs/LinkTimeOptimization.html
[`-C linker-plugin-lto`]: ../../rustc/codegen-options/index.html#linker-plugin-lto
+[`-C embed-bitcode`]: ../../rustc/codegen-options/index.html#embed-bitcode
+[linker-plugin-lto chapter]: ../../rustc/linker-plugin-lto.html
["thin" LTO]: http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html
#### panic
diff --git a/src/tools/cargo/src/doc/src/reference/resolver.md b/src/tools/cargo/src/doc/src/reference/resolver.md
index 151648f43..ffb194c5e 100644
--- a/src/tools/cargo/src/doc/src/reference/resolver.md
+++ b/src/tools/cargo/src/doc/src/reference/resolver.md
@@ -182,11 +182,7 @@ release. Non-numeric components are compared lexicographically.
SemVer has the concept of "version metadata" with a plus in the version, such
as `1.0.0+21AF26D3`. This metadata is usually ignored, and should not be used
in a version requirement. You should never publish multiple versions that
-differ only in the metadata tag (note, this is a [known issue] with
-[crates.io] that currently permits this).
-
-[known issue]: https://github.com/rust-lang/crates.io/issues/1059
-[crates.io]: https://crates.io/
+differ only in the metadata tag.
## Other constraints
@@ -486,6 +482,7 @@ are too loose, it may be possible for new versions to be published that will
break the build.
[SemVer guidelines]: semver.md
+[crates.io]: https://crates.io/
## Troubleshooting
diff --git a/src/tools/cargo/src/doc/src/reference/semver.md b/src/tools/cargo/src/doc/src/reference/semver.md
index 5a53a6554..69d983078 100644
--- a/src/tools/cargo/src/doc/src/reference/semver.md
+++ b/src/tools/cargo/src/doc/src/reference/semver.md
@@ -59,6 +59,8 @@ considered incompatible.
* Items
* [Major: renaming/moving/removing any public items](#item-remove)
* [Minor: adding new public items](#item-new)
+ * Types
+ * [Major: Changing the alignment, layout, or size of a well-defined type](#type-layout)
* Structs
* [Major: adding a private struct field when all current fields are public](#struct-add-private-field-when-public)
* [Major: adding a public field when no private field exists](#struct-add-public-field-when-no-private)
@@ -112,8 +114,7 @@ after it has been modified, and an example usage of the code that could appear
in another project. In a minor change, the example usage should successfully
build with both the before and after versions.
-<a id="item-remove"></a>
-### Major: renaming/moving/removing any public items
+### Major: renaming/moving/removing any public items {#item-remove}
The absence of a publicly exposed [item][items] will cause any uses of that item to
fail to compile.
@@ -145,8 +146,7 @@ Mitigating strategies:
* Mark renamed items as [deprecated], and use a [`pub use`] item to re-export
to the old name.
-<a id="item-new"></a>
-### Minor: adding new public items
+### Minor: adding new public items {#item-new}
Adding new, public [items] is a minor change.
@@ -206,8 +206,752 @@ This is not considered a major change because conventionally glob imports are
a known forwards-compatibility hazard. Glob imports of items from external
crates should be avoided.
-<a id="struct-add-private-field-when-public"></a>
-### Major: adding a private struct field when all current fields are public
+### Major: Changing the alignment, layout, or size of a well-defined type {#type-layout}
+
+It is a breaking change to change the alignment, layout, or size of a type that was previously well-defined.
+
+In general, types that use [the default representation] do not have a well-defined alignment, layout, or size.
+The compiler is free to alter the alignment, layout, or size, so code should not make any assumptions about it.
+
+> **Note**: It may be possible for external crates to break if they make assumptions about the alignment, layout, or size of a type even if it is not well-defined.
+> This is not considered a SemVer breaking change since those assumptions should not be made.
+
+Some examples of changes that are not a breaking change are (assuming no other rules in this guide are violated):
+
+* Adding, removing, reordering, or changing fields of a default representation struct, union, or enum in such a way that the change follows the other rules in this guide (for example, using `non_exhaustive` to allow those changes, or changing fields that are already private).
+ See [struct-add-private-field-when-public](#struct-add-private-field-when-public), [struct-add-public-field-when-no-private](#struct-add-public-field-when-no-private), [struct-private-fields-with-private](#struct-private-fields-with-private), [enum-fields-new](#enum-fields-new).
+* Adding variants to a default representation enum, if the enum uses `non_exhaustive`.
+ This may change the alignment or size of the enumeration, but those are not well-defined.
+ See [enum-variant-new](#enum-variant-new).
+* Adding, removing, reordering, or changing private fields of a `repr(C)` struct, union, or enum, following the other rules in this guide (for example, using `non_exhaustive`, or adding private fields when other private fields already exist).
+ See [repr-c-private-change](#repr-c-private-change).
+* Adding variants to a `repr(C)` enum, if the enum uses `non_exhaustive`.
+ See [repr-c-enum-variant-new](#repr-c-enum-variant-new).
+* Adding `repr(C)` to a default representation struct, union, or enum.
+ See [repr-c-add](#repr-c-add).
+* Adding `repr(<int>)` [primitive representation] to an enum.
+ See [repr-int-enum-add](#repr-int-enum-add).
+* Adding `repr(transparent)` to a default representation struct or enum.
+ See [repr-transparent-add](#repr-transparent-add).
+
+Types that use the [`repr` attribute] can be said to have an alignment and layout that is defined in some way, and code may make assumptions about that alignment and layout which can break when the type is changed.
+
+In some cases, types with a `repr` attribute may not have an alignment, layout, or size that is well-defined.
+In these cases, it may be safe to make changes to the types, though care should be exercised.
+For example, types with private fields that do not otherwise document their alignment, layout, or size guarantees cannot be relied upon by external crates since the public API does not fully define the alignment, layout, or size of the type.
+
+A common example where a type with *private* fields is well-defined is a type with a single private field with a generic type, using `repr(transparent)`,
+whose documentation states that it is transparent to the generic type.
+For example, see [`UnsafeCell`].
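+
+For illustration, a hypothetical wrapper (not part of the standard library) whose documentation makes such a guarantee might look like this:
+
+```rust,ignore
+/// Documented guarantee: `Wrapper<T>` always has the same layout as `T`.
+#[repr(transparent)]
+pub struct Wrapper<T> {
+    value: T, // private, but the documented transparency makes the layout part of the public API
+}
+```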
+
+Some examples of breaking changes are:
+
+* Adding `repr(packed)` to a struct or union.
+ See [repr-packed-add](#repr-packed-add).
+* Adding `repr(align)` to a struct, union, or enum.
+ See [repr-align-add](#repr-align-add).
+* Removing `repr(packed)` from a struct or union.
+ See [repr-packed-remove](#repr-packed-remove).
+* Changing the value N of `repr(packed(N))` if that changes the alignment or layout.
+ See [repr-packed-n-change](#repr-packed-n-change).
+* Changing the value N of `repr(align(N))` if that changes the alignment.
+ See [repr-align-n-change](#repr-align-n-change).
+* Removing `repr(align)` from a struct, union, or enum.
+ See [repr-align-remove](#repr-align-remove).
+* Changing the order of public fields of a `repr(C)` type.
+ See [repr-c-shuffle](#repr-c-shuffle).
+* Removing `repr(C)` from a struct, union, or enum.
+ See [repr-c-remove](#repr-c-remove).
+* Removing `repr(<int>)` from an enum.
+ See [repr-int-enum-remove](#repr-int-enum-remove).
+* Changing the primitive representation of a `repr(<int>)` enum.
+ See [repr-int-enum-change](#repr-int-enum-change).
+* Removing `repr(transparent)` from a struct or enum.
+ See [repr-transparent-remove](#repr-transparent-remove).
+
+[the default representation]: ../../reference/type-layout.html#the-default-representation
+[primitive representation]: ../../reference/type-layout.html#primitive-representations
+[`repr` attribute]: ../../reference/type-layout.html#representations
+[`std::mem::transmute`]: ../../std/mem/fn.transmute.html
+[`UnsafeCell`]: ../../std/cell/struct.UnsafeCell.html#memory-layout
+
+#### Minor: `repr(C)` add, remove, or change a private field {#repr-c-private-change}
+
+It is usually safe to add, remove, or change a private field of a `repr(C)` struct, union, or enum, assuming it follows the other rules in this guide (see [struct-add-private-field-when-public](#struct-add-private-field-when-public), [struct-add-public-field-when-no-private](#struct-add-public-field-when-no-private), [struct-private-fields-with-private](#struct-private-fields-with-private), [enum-fields-new](#enum-fields-new)).
+
+For example, adding private fields can only be done if there are already other private fields, or it is `non_exhaustive`.
+Public fields may be added if there are private fields, or it is `non_exhaustive`, and the addition does not alter the layout of the other fields.
+
+However, this may change the size and alignment of the type.
+Care should be taken if the size or alignment changes.
+Code should not make assumptions about the size or alignment of types with private fields or `non_exhaustive` unless they have a documented size or alignment.
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[derive(Default)]
+#[repr(C)]
+pub struct Example {
+ pub f1: i32,
+ f2: i32, // a private field
+}
+
+///////////////////////////////////////////////////////////
+// After
+#[derive(Default)]
+#[repr(C)]
+pub struct Example {
+ pub f1: i32,
+ f2: i32,
+ f3: i32, // a new field
+}
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+fn main() {
+ // NOTE: Users should not make assumptions about the size or alignment
+ // since they are not documented.
+ let f = updated_crate::Example::default();
+}
+```
+
+#### Minor: `repr(C)` add enum variant {#repr-c-enum-variant-new}
+
+It is usually safe to add variants to a `repr(C)` enum, if the enum uses `non_exhaustive`.
+See [enum-variant-new](#enum-variant-new) for more discussion.
+
+Note that this may be a breaking change since it may change the size and alignment of the type.
+See [repr-c-private-change](#repr-c-private-change) for similar concerns.
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[repr(C)]
+#[non_exhaustive]
+pub enum Example {
+ Variant1 { f1: i16 },
+ Variant2 { f1: i32 },
+}
+
+///////////////////////////////////////////////////////////
+// After
+#[repr(C)]
+#[non_exhaustive]
+pub enum Example {
+ Variant1 { f1: i16 },
+ Variant2 { f1: i32 },
+ Variant3 { f1: i64 }, // added
+}
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+fn main() {
+ // NOTE: Users should not make assumptions about the size or alignment
+ // since they are not specified. For example, this raised the size from 8
+ // to 16 bytes.
+ let f = updated_crate::Example::Variant2 { f1: 123 };
+}
+```
+
+#### Minor: Adding `repr(C)` to a default representation {#repr-c-add}
+
+It is safe to add `repr(C)` to a struct, union, or enum with [the default representation].
+This is safe because users should not make assumptions about the alignment, layout, or size of types with the default representation.
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub struct Example {
+ pub f1: i32,
+ pub f2: i16,
+}
+
+///////////////////////////////////////////////////////////
+// After
+#[repr(C)] // added
+pub struct Example {
+ pub f1: i32,
+ pub f2: i16,
+}
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+fn main() {
+ let f = updated_crate::Example { f1: 123, f2: 456 };
+}
+```
+
+#### Minor: Adding `repr(<int>)` to an enum {#repr-int-enum-add}
+
+It is safe to add `repr(<int>)` [primitive representation] to an enum with [the default representation].
+This is safe because users should not make assumptions about the alignment, layout, or size of an enum with the default representation.
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub enum E {
+ Variant1,
+ Variant2(i32),
+ Variant3 { f1: f64 },
+}
+
+///////////////////////////////////////////////////////////
+// After
+#[repr(i32)] // added
+pub enum E {
+ Variant1,
+ Variant2(i32),
+ Variant3 { f1: f64 },
+}
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+fn main() {
+ let x = updated_crate::E::Variant3 { f1: 1.23 };
+}
+```
+
+#### Minor: Adding `repr(transparent)` to a default representation struct or enum {#repr-transparent-add}
+
+It is safe to add `repr(transparent)` to a struct or enum with [the default representation].
+This is safe because users should not make assumptions about the alignment, layout, or size of a struct or enum with the default representation.
+
+```rust,ignore
+// MINOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[derive(Default)]
+pub struct Example<T>(T);
+
+///////////////////////////////////////////////////////////
+// After
+#[derive(Default)]
+#[repr(transparent)] // added
+pub struct Example<T>(T);
+
+///////////////////////////////////////////////////////////
+// Example use of the library that will safely work.
+fn main() {
+ let x = updated_crate::Example::<i32>::default();
+}
+```
+
+#### Major: Adding `repr(packed)` to a struct or union {#repr-packed-add}
+
+It is a breaking change to add `repr(packed)` to a struct or union.
+Making a type `repr(packed)` makes changes that can break code, such as making it invalid to take a reference to a field, or causing truncation of disjoint closure captures.
+
+<!-- TODO: If all fields are private, should this be safe to do? -->
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub struct Example {
+ pub f1: u8,
+ pub f2: u16,
+}
+
+///////////////////////////////////////////////////////////
+// After
+#[repr(packed)] // added
+pub struct Example {
+ pub f1: u8,
+ pub f2: u16,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+fn main() {
+ let f = updated_crate::Example { f1: 1, f2: 2 };
+ let x = &f.f2; // Error: reference to packed field is unaligned
+}
+```
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub struct Example(pub i32, pub i32);
+
+///////////////////////////////////////////////////////////
+// After
+#[repr(packed)]
+pub struct Example(pub i32, pub i32);
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+fn main() {
+ let mut f = updated_crate::Example(123, 456);
+ let c = || {
+ // Without repr(packed), the closure precisely captures `&f.0`.
+ // With repr(packed), the closure captures `&f` to avoid undefined behavior.
+ let a = f.0;
+ };
+ f.1 = 789; // Error: cannot assign to `f.1` because it is borrowed
+ c();
+}
+```
+
+#### Major: Adding `repr(align)` to a struct, union, or enum {#repr-align-add}
+
+It is a breaking change to add `repr(align)` to a struct, union, or enum.
+Making a type `repr(align)` would break any use of that type in a `repr(packed)` type because that combination is not allowed.
+
+<!-- TODO: This seems like it should be extraordinarily rare. Should there be any exceptions carved out for this? -->
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+pub struct Aligned {
+ pub a: i32,
+}
+
+///////////////////////////////////////////////////////////
+// After
+#[repr(align(8))] // added
+pub struct Aligned {
+ pub a: i32,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Aligned;
+
+#[repr(packed)]
+pub struct Packed { // Error: packed type cannot transitively contain a `#[repr(align)]` type
+ f1: Aligned,
+}
+
+fn main() {
+ let p = Packed {
+ f1: Aligned { a: 123 },
+ };
+}
+```
+
+#### Major: Removing `repr(packed)` from a struct or union {#repr-packed-remove}
+
+It is a breaking change to remove `repr(packed)` from a struct or union.
+This may change the alignment or layout that external crates are relying on.
+
+If any fields are public, then removing `repr(packed)` may change the way disjoint closure captures work.
+In some cases, this can cause code to break, similar to those outlined in the [edition guide][edition-closures].
+
+[edition-closures]: ../../edition-guide/rust-2021/disjoint-capture-in-closures.html
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[repr(C, packed)]
+pub struct Packed {
+ pub a: u8,
+ pub b: u16,
+}
+
+///////////////////////////////////////////////////////////
+// After
+#[repr(C)] // removed packed
+pub struct Packed {
+ pub a: u8,
+ pub b: u16,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Packed;
+
+fn main() {
+ let p = Packed { a: 1, b: 2 };
+ // Some assumption about the size of the type.
+ // Without `packed`, this fails since the size is 4.
+ const _: () = assert!(std::mem::size_of::<Packed>() == 3); // Error: evaluation of constant value failed
+}
+```
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[repr(C, packed)]
+pub struct Packed {
+ pub a: *mut i32,
+ pub b: i32,
+}
+unsafe impl Send for Packed {}
+
+///////////////////////////////////////////////////////////
+// After
+#[repr(C)] // removed packed
+pub struct Packed {
+ pub a: *mut i32,
+ pub b: i32,
+}
+unsafe impl Send for Packed {}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Packed;
+
+fn main() {
+ let mut x = 123;
+
+ let p = Packed {
+ a: &mut x as *mut i32,
+ b: 456,
+ };
+
+ // When the structure was packed, the closure captures `p` which is Send.
+ // When `packed` is removed, this ends up capturing `p.a` which is not Send.
+ std::thread::spawn(move || unsafe {
+ *(p.a) += 1; // Error: cannot be sent between threads safely
+ });
+}
+```
+
+#### Major: Changing the value N of `repr(packed(N))` if that changes the alignment or layout {#repr-packed-n-change}
+
+It is a breaking change to change the value `N` of `repr(packed(N))` if that changes the alignment or layout.
+This may change the alignment or layout that external crates are relying on.
+
+If the value `N` is lowered below the alignment of a public field, then that would break any code that attempts to take a reference to that field.
+
+Note that some changes to `N` may not change the alignment or layout, for example increasing it when the current value is already equal to the natural alignment of the type.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[repr(packed(4))]
+pub struct Packed {
+ pub a: u8,
+ pub b: u32,
+}
+
+///////////////////////////////////////////////////////////
+// After
+#[repr(packed(2))] // changed to 2
+pub struct Packed {
+ pub a: u8,
+ pub b: u32,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Packed;
+
+fn main() {
+ let p = Packed { a: 1, b: 2 };
+ let x = &p.b; // Error: reference to packed field is unaligned
+}
+```
+
+#### Major: Changing the value N of `repr(align(N))` if that changes the alignment {#repr-align-n-change}
+
+It is a breaking change to change the value `N` of `repr(align(N))` if that changes the alignment.
+This may change the alignment that external crates are relying on.
+
+This change should be safe to make if the type is not well-defined as discussed in [type layout](#type-layout) (such as having any private fields and having an undocumented alignment or layout).
+
+Note that some changes to `N` may not change the alignment or layout, for example decreasing it when the current value is already equal to or less than the natural alignment of the type.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[repr(align(8))]
+pub struct Packed {
+ pub a: u8,
+ pub b: u32,
+}
+
+///////////////////////////////////////////////////////////
+// After
+#[repr(align(4))] // changed to 4
+pub struct Packed {
+ pub a: u8,
+ pub b: u32,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Packed;
+
+fn main() {
+ let p = Packed { a: 1, b: 2 };
+    // Some assumption about the alignment of the type.
+ // The alignment has changed from 8 to 4.
+ const _: () = assert!(std::mem::align_of::<Packed>() == 8); // Error: evaluation of constant value failed
+}
+```
+
+#### Major: Removing `repr(align)` from a struct, union, or enum {#repr-align-remove}
+
+It is a breaking change to remove `repr(align)` from a struct, union, or enum, if its layout was well-defined.
+This may change the alignment or layout that external crates are relying on.
+
+This change should be safe to make if the type is not well-defined as discussed in [type layout](#type-layout) (such as having any private fields and having an undocumented alignment).
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[repr(C, align(8))]
+pub struct Packed {
+ pub a: u8,
+ pub b: u32,
+}
+
+///////////////////////////////////////////////////////////
+// After
+#[repr(C)] // removed align
+pub struct Packed {
+ pub a: u8,
+ pub b: u32,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::Packed;
+
+fn main() {
+ let p = Packed { a: 1, b: 2 };
+    // Some assumption about the alignment of the type.
+ // The alignment has changed from 8 to 4.
+ const _: () = assert!(std::mem::align_of::<Packed>() == 8); // Error: evaluation of constant value failed
+}
+```
+
+#### Major: Changing the order of public fields of a `repr(C)` type {#repr-c-shuffle}
+
+It is a breaking change to change the order of public fields of a `repr(C)` type.
+External crates may be relying on the specific ordering of the fields.
+
+```rust,ignore,run-fail
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[repr(C)]
+pub struct SpecificLayout {
+ pub a: u8,
+ pub b: u32,
+}
+
+///////////////////////////////////////////////////////////
+// After
+#[repr(C)]
+pub struct SpecificLayout {
+ pub b: u32, // changed order
+ pub a: u8,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+use updated_crate::SpecificLayout;
+
+extern "C" {
+ // This C function is assuming a specific layout defined in a C header.
+ fn c_fn_get_b(x: &SpecificLayout) -> u32;
+}
+
+fn main() {
+ let p = SpecificLayout { a: 1, b: 2 };
+ unsafe { assert_eq!(c_fn_get_b(&p), 2) } // Error: value not equal to 2
+}
+
+# mod cdep {
+# // This simulates what would normally be something included from a build script.
+# // This definition would be in a C header.
+# #[repr(C)]
+# pub struct SpecificLayout {
+# pub a: u8,
+# pub b: u32,
+# }
+#
+# #[no_mangle]
+# pub fn c_fn_get_b(x: &SpecificLayout) -> u32 {
+# x.b
+# }
+# }
+```
+
+#### Major: Removing `repr(C)` from a struct, union, or enum {#repr-c-remove}
+
+It is a breaking change to remove `repr(C)` from a struct, union, or enum.
+External crates may be relying on the specific layout of the type.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[repr(C)]
+pub struct SpecificLayout {
+ pub a: u8,
+ pub b: u32,
+}
+
+///////////////////////////////////////////////////////////
+// After
+// removed repr(C)
+pub struct SpecificLayout {
+ pub a: u8,
+ pub b: u32,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+#![deny(improper_ctypes)]
+use updated_crate::SpecificLayout;
+
+extern "C" {
+ // This C function is assuming a specific layout defined in a C header.
+ fn c_fn_get_b(x: &SpecificLayout) -> u32; // Error: is not FFI-safe
+}
+
+fn main() {
+ let p = SpecificLayout { a: 1, b: 2 };
+ unsafe { assert_eq!(c_fn_get_b(&p), 2) }
+}
+
+# mod cdep {
+# // This simulates what would normally be something included from a build script.
+# // This definition would be in a C header.
+# #[repr(C)]
+# pub struct SpecificLayout {
+# pub a: u8,
+# pub b: u32,
+# }
+#
+# #[no_mangle]
+# pub fn c_fn_get_b(x: &SpecificLayout) -> u32 {
+# x.b
+# }
+# }
+```
+
+#### Major: Removing `repr(<int>)` from an enum {#repr-int-enum-remove}
+
+It is a breaking change to remove `repr(<int>)` from an enum.
+External crates may be assuming that the discriminant is a specific size.
+For example, [`std::mem::transmute`] of an enum may fail.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[repr(u16)]
+pub enum Example {
+ Variant1,
+ Variant2,
+ Variant3,
+}
+
+///////////////////////////////////////////////////////////
+// After
+// removed repr(u16)
+pub enum Example {
+ Variant1,
+ Variant2,
+ Variant3,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+
+fn main() {
+ let e = updated_crate::Example::Variant2;
+ let i: u16 = unsafe { std::mem::transmute(e) }; // Error: cannot transmute between types of different sizes
+}
+```
+
+#### Major: Changing the primitive representation of a `repr(<int>)` enum {#repr-int-enum-change}
+
+It is a breaking change to change the primitive representation of a `repr(<int>)` enum.
+External crates may be assuming that the discriminant is a specific size.
+For example, [`std::mem::transmute`] of an enum may fail.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[repr(u16)]
+pub enum Example {
+ Variant1,
+ Variant2,
+ Variant3,
+}
+
+///////////////////////////////////////////////////////////
+// After
+#[repr(u8)] // changed repr size
+pub enum Example {
+ Variant1,
+ Variant2,
+ Variant3,
+}
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+
+fn main() {
+ let e = updated_crate::Example::Variant2;
+ let i: u16 = unsafe { std::mem::transmute(e) }; // Error: cannot transmute between types of different sizes
+}
+```
+
+#### Major: Removing `repr(transparent)` from a struct or enum {#repr-transparent-remove}
+
+It is a breaking change to remove `repr(transparent)` from a struct or enum.
+External crates may be relying on the type having the alignment, layout, or size of the transparent field.
+
+```rust,ignore
+// MAJOR CHANGE
+
+///////////////////////////////////////////////////////////
+// Before
+#[repr(transparent)]
+pub struct Transparent<T>(T);
+
+///////////////////////////////////////////////////////////
+// After
+// removed repr
+pub struct Transparent<T>(T);
+
+///////////////////////////////////////////////////////////
+// Example usage that will break.
+#![deny(improper_ctypes)]
+use updated_crate::Transparent;
+
+extern "C" {
+ fn c_fn() -> Transparent<f64>; // Error: is not FFI-safe
+}
+
+fn main() {}
+```
+
+### Major: adding a private struct field when all current fields are public {#struct-add-private-field-when-public}
When a private field is added to a struct that previously had all public fields,
this will break any code that attempts to construct it with a [struct literal].
@@ -241,8 +985,7 @@ Mitigation strategies:
a struct to prevent users from using struct literal syntax, and instead
provide a constructor method and/or [Default] implementation.
-<a id="struct-add-public-field-when-no-private"></a>
-### Major: adding a public field when no private field exists
+### Major: adding a public field when no private field exists {#struct-add-public-field-when-no-private}
When a public field is added to a struct that has all public fields, this will
break any code that attempts to construct it with a [struct literal].
@@ -276,8 +1019,7 @@ Mitigation strategies:
a struct to prevent users from using struct literal syntax, and instead
provide a constructor method and/or [Default] implementation.
-<a id="struct-private-fields-with-private"></a>
-### Minor: adding or removing private fields when at least one already exists
+### Minor: adding or removing private fields when at least one already exists {#struct-private-fields-with-private}
It is safe to add or remove private fields from a struct when the struct
already has at least one private field.
@@ -335,8 +1077,7 @@ fn main() {
}
```
-<a id="struct-tuple-normal-with-private"></a>
-### Minor: going from a tuple struct with all private fields (with at least one field) to a normal struct, or vice versa
+### Minor: going from a tuple struct with all private fields (with at least one field) to a normal struct, or vice versa {#struct-tuple-normal-with-private}
Changing a tuple struct to a normal struct (or vice-versa) is safe if all
fields are private.
@@ -367,8 +1108,7 @@ fn main() {
This is safe because existing code cannot use a [struct literal] to construct
it, nor match its contents.
-<a id="enum-variant-new"></a>
-### Major: adding new enum variants (without `non_exhaustive`)
+### Major: adding new enum variants (without `non_exhaustive`) {#enum-variant-new}
It is a breaking change to add a new enum variant if the enum does not use the
[`#[non_exhaustive]`][non_exhaustive] attribute.
@@ -404,8 +1144,7 @@ Mitigation strategies:
* When introducing the enum, mark it as [`#[non_exhaustive]`][non_exhaustive]
to force users to use [wildcard patterns] to catch new variants.
-<a id="enum-fields-new"></a>
-### Major: adding new fields to an enum variant
+### Major: adding new fields to an enum variant {#enum-fields-new}
It is a breaking change to add new fields to an enum variant because all
fields are public, and constructors and matching will fail to compile.
@@ -457,8 +1196,7 @@ Mitigation strategies:
}
```
-<a id="trait-new-item-no-default"></a>
-### Major: adding a non-defaulted trait item
+### Major: adding a non-defaulted trait item {#trait-new-item-no-default}
It is a breaking change to add a non-defaulted item to a trait. This will
break any implementors of the trait.
@@ -490,8 +1228,7 @@ Mitigation strategies:
* When introducing the trait, use the [sealed trait] technique to prevent
users outside of the crate from implementing the trait.
-<a id="trait-item-signature"></a>
-### Major: any change to trait item signatures
+### Major: any change to trait item signatures {#trait-item-signature}
It is a breaking change to make any change to a trait item signature. This can
break external implementors of the trait.
@@ -530,8 +1267,7 @@ Mitigation strategies:
* When introducing the trait, use the [sealed trait] technique to prevent
users outside of the crate from implementing the trait.
-<a id="trait-new-default-item"></a>
-### Possibly-breaking: adding a defaulted trait item
+### Possibly-breaking: adding a defaulted trait item {#trait-new-default-item}
It is usually safe to add a defaulted trait item. However, this can sometimes
cause a compile error. For example, this can introduce an ambiguity if a
@@ -581,8 +1317,7 @@ Mitigation strategies:
to require downstream users to add [disambiguation syntax] to select the
correct function when updating the dependency.
-<a id="trait-object-safety"></a>
-### Major: adding a trait item that makes the trait non-object safe
+### Major: adding a trait item that makes the trait non-object safe {#trait-object-safety}
It is a breaking change to add a trait item that changes the trait to not be
[object safe].
@@ -616,8 +1351,7 @@ fn main() {
It is safe to do the converse (making a non-object safe trait into a safe
one).
-<a id="trait-new-parameter-no-default"></a>
-### Major: adding a type parameter without a default
+### Major: adding a type parameter without a default {#trait-new-parameter-no-default}
It is a breaking change to add a type parameter without a default to a trait.
@@ -643,8 +1377,7 @@ impl Trait for Foo {} // Error: missing generics
Mitigating strategies:
* See [adding a defaulted trait type parameter](#trait-new-parameter-default).
-<a id="trait-new-parameter-default"></a>
-### Minor: adding a defaulted trait type parameter
+### Minor: adding a defaulted trait type parameter {#trait-new-parameter-default}
It is safe to add a type parameter to a trait as long as it has a default.
External implementors will use the default without needing to specify the
@@ -669,8 +1402,7 @@ struct Foo;
impl Trait for Foo {}
```
-<a id="impl-item-new"></a>
-### Possibly-breaking change: adding any inherent items
+### Possibly-breaking change: adding any inherent items {#impl-item-new}
Usually adding inherent items to an implementation should be safe because
inherent items take priority over trait items. However, in some cases the
@@ -719,8 +1451,7 @@ Mitigation strategies:
to require downstream users to add [disambiguation syntax] to select the
correct function when updating the dependency.
-<a id="generic-bounds-tighten"></a>
-### Major: tightening generic bounds
+### Major: tightening generic bounds {#generic-bounds-tighten}
It is a breaking change to tighten generic bounds on a type since this can
break users expecting the looser bounds.
@@ -749,8 +1480,7 @@ fn main() {
}
```
-<a id="generic-bounds-loosen"></a>
-### Minor: loosening generic bounds
+### Minor: loosening generic bounds {#generic-bounds-loosen}
It is safe to loosen the generic bounds on a type, as it only expands what is
allowed.
@@ -779,8 +1509,7 @@ fn main() {
}
```
-<a id="generic-new-default"></a>
-### Minor: adding defaulted type parameters
+### Minor: adding defaulted type parameters {#generic-new-default}
It is safe to add a type parameter to a type as long as it has a default. All
existing references will use the default without needing to specify the
@@ -810,8 +1539,7 @@ fn main() {
}
```
-<a id="generic-generalize-identical"></a>
-### Minor: generalizing a type to use generics (with identical types)
+### Minor: generalizing a type to use generics (with identical types) {#generic-generalize-identical}
A struct or enum field can change from a concrete type to a generic type
parameter, provided that the change results in an identical type for all
@@ -840,8 +1568,7 @@ fn main() {
because existing uses of `Foo` are shorthand for `Foo<u8>` which yields the
identical field type.
-<a id="generic-generalize-different"></a>
-### Major: generalizing a type to use generics (with possibly different types)
+### Major: generalizing a type to use generics (with possibly different types) {#generic-generalize-different}
Changing a struct or enum field from a concrete type to a generic type
parameter can break if the type can change.
@@ -866,8 +1593,7 @@ fn main() {
}
```
-<a id="generic-more-generic"></a>
-### Minor: changing a generic type to a more generic type
+### Minor: changing a generic type to a more generic type {#generic-more-generic}
It is safe to change a generic type to a more generic one. For example, the
following adds a generic parameter that defaults to the original type, which
@@ -894,8 +1620,7 @@ fn main() {
}
```
-<a id="fn-change-arity"></a>
-### Major: adding/removing function parameters
+### Major: adding/removing function parameters {#fn-change-arity}
Changing the arity of a function is a breaking change.
@@ -924,8 +1649,7 @@ Mitigating strategies:
with the builder pattern. This allows new fields to be added to the struct
in the future.
-<a id="fn-generic-new"></a>
-### Possibly-breaking: introducing a new function type parameter
+### Possibly-breaking: introducing a new function type parameter {#fn-generic-new}
Usually, adding a non-defaulted type parameter is safe, but in some
cases it can be a breaking change:
@@ -955,8 +1679,7 @@ other ways) that this breakage is usually acceptable. One should take into
account how likely it is that the function in question is being called with
explicit type arguments.
-<a id="fn-generalize-compatible"></a>
-### Minor: generalizing a function to use generics (supporting original type)
+### Minor: generalizing a function to use generics (supporting original type) {#fn-generalize-compatible}
The type of a parameter to a function, or its return value, can be
*generalized* to use generics, including by introducing a new type parameter,
@@ -1053,8 +1776,7 @@ fn main() {
}
```
-<a id="fn-generalize-mismatch"></a>
-### Major: generalizing a function to use generics with type mismatch
+### Major: generalizing a function to use generics with type mismatch {#fn-generalize-mismatch}
It is a breaking change to change a function parameter or return type if the
generic type constrains or changes the types previously allowed. For example,
@@ -1081,8 +1803,7 @@ fn main() {
}
```
-<a id="fn-unsafe-safe"></a>
-### Minor: making an `unsafe` function safe
+### Minor: making an `unsafe` function safe {#fn-unsafe-safe}
A previously `unsafe` function can be made safe without breaking code.
@@ -1122,8 +1843,7 @@ Making a previously `unsafe` associated function or method on structs / enums
safe is also a minor change, while the same is not true for associated
function on traits (see [any change to trait item signatures](#trait-item-signature)).
-<a id="attr-no-std-to-std"></a>
-### Major: switching from `no_std` support to requiring `std`
+### Major: switching from `no_std` support to requiring `std` {#attr-no-std-to-std}
If your library specifically supports a [`no_std`] environment, it is a
breaking change to make a new release that requires `std`.
@@ -1158,8 +1878,7 @@ Mitigation strategies:
optionally enables `std` support, and when the feature is off, the library
can be used in a `no_std` environment.
-<a id="attr-adding-non-exhaustive"></a>
-### Major: adding `non_exhaustive` to an existing enum, variant, or struct with no private fields
+### Major: adding `non_exhaustive` to an existing enum, variant, or struct with no private fields {#attr-adding-non-exhaustive}
Making items [`#[non_exhaustive]`][non_exhaustive] changes how they may
be used outside the crate where they are defined:
@@ -1243,8 +1962,7 @@ Mitigation strategies:
## Tooling and environment compatibility
-<a id="env-new-rust"></a>
-### Possibly-breaking: changing the minimum version of Rust required
+### Possibly-breaking: changing the minimum version of Rust required {#env-new-rust}
Introducing the use of new features in a new release of Rust can break
projects that are using older versions of Rust. This also includes using new
@@ -1271,8 +1989,7 @@ Mitigation strategies:
mechanism for new features. These are currently unstable and only available
in the nightly channel.
-<a id="env-change-requirements"></a>
-### Possibly-breaking: changing the platform and environment requirements
+### Possibly-breaking: changing the platform and environment requirements {#env-change-requirements}
There is a very wide range of assumptions a library makes about the
environment that it runs in, such as the host platform, operating system
@@ -1292,8 +2009,7 @@ Mitigation strategies:
* Document the platforms and environments you specifically support.
* Test your code on a wide range of environments in CI.
-<a id="new-lints"></a>
-### Minor: introducing new lints
+### Minor: introducing new lints {#new-lints}
Some changes to a library may cause new lints to be triggered in users of that library.
This should generally be considered a compatible change.
@@ -1348,8 +2064,7 @@ Mitigating strategies:
### Cargo
-<a id="cargo-feature-add"></a>
-#### Minor: adding a new Cargo feature
+#### Minor: adding a new Cargo feature {#cargo-feature-add}
It is usually safe to add new [Cargo features]. If the feature introduces new
changes that cause a breaking change, this can cause difficulties for projects
@@ -1371,8 +2086,7 @@ consequences of enabling the feature.
std = []
```
-<a id="cargo-feature-remove"></a>
-#### Major: removing a Cargo feature
+#### Major: removing a Cargo feature {#cargo-feature-remove}
It is usually a breaking change to remove [Cargo features]. This will cause
an error for any project that enabled the feature.
@@ -1398,8 +2112,7 @@ Mitigation strategies:
functionality. Document that the feature is deprecated, and remove it in a
future major SemVer release.
-<a id="cargo-feature-remove-another"></a>
-#### Major: removing a feature from a feature list if that changes functionality or public items
+#### Major: removing a feature from a feature list if that changes functionality or public items {#cargo-feature-remove-another}
If removing a feature from another feature, this can break existing users if
they are expecting that functionality to be available through that feature.
@@ -1420,8 +2133,7 @@ default = [] # This may cause packages to fail if they are expecting std to be
std = []
```
-<a id="cargo-remove-opt-dep"></a>
-#### Possibly-breaking: removing an optional dependency
+#### Possibly-breaking: removing an optional dependency {#cargo-remove-opt-dep}
Removing an optional dependency can break a project using your library because
another project may be enabling that dependency via [Cargo features].
@@ -1454,8 +2166,7 @@ Mitigation strategies:
optional dependencies necessary to implement "networking". Then document the
"networking" feature.
-<a id="cargo-change-dep-feature"></a>
-#### Minor: changing dependency features
+#### Minor: changing dependency features {#cargo-change-dep-feature}
It is usually safe to change the features on a dependency, as long as the
feature does not introduce a breaking change.
@@ -1475,8 +2186,7 @@ rand = { version = "0.7.3", features = ["small_rng"] }
rand = "0.7.3"
```
-<a id="cargo-dep-add"></a>
-#### Minor: adding dependencies
+#### Minor: adding dependencies {#cargo-dep-add}
It is usually safe to add new dependencies, as long as the new dependency
does not introduce new requirements that result in a breaking change.
diff --git a/src/tools/cargo/src/doc/src/reference/specifying-dependencies.md b/src/tools/cargo/src/doc/src/reference/specifying-dependencies.md
index 8941e67ad..f044af95b 100644
--- a/src/tools/cargo/src/doc/src/reference/specifying-dependencies.md
+++ b/src/tools/cargo/src/doc/src/reference/specifying-dependencies.md
@@ -22,7 +22,7 @@ time = "0.1.12"
The string `"0.1.12"` is a version requirement. Although it looks like a
specific *version* of the `time` crate, it actually specifies a *range* of
versions and allows [SemVer] compatible updates. An update is allowed if the new
-version number does not modify the left-most non-zero digit in the major, minor,
+version number does not modify the left-most non-zero number in the major, minor,
patch grouping. In this case, if we ran `cargo update -p time`, cargo should
update us to version `0.1.13` if it is the latest `0.1.z` release, but would not
update us to `0.2.0`. If instead we had specified the version string as `1.0`,
diff --git a/src/tools/cargo/src/doc/src/reference/unstable.md b/src/tools/cargo/src/doc/src/reference/unstable.md
index b59319196..55084f88e 100644
--- a/src/tools/cargo/src/doc/src/reference/unstable.md
+++ b/src/tools/cargo/src/doc/src/reference/unstable.md
@@ -399,7 +399,7 @@ the tracking repository, and if it's not there please file a new issue!
This flag is a sibling to the `-Zbuild-std` feature flag. This will configure
the features enabled for the standard library itself when building the standard
library. The default enabled features, at this time, are `backtrace` and
-`panic_unwind`. This flag expects a comma-separated list and, if provided, will
+`panic-unwind`. This flag expects a comma-separated list and, if provided, will
override the default list of features enabled.
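+
+For example, a hypothetical invocation that overrides the default feature list might look like this (the target triple is only an example; `-Z build-std` generally requires an explicit `--target`):
+
+```console
+cargo build -Z build-std -Z build-std-features=backtrace,panic-unwind --target x86_64-unknown-linux-gnu
+```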
### binary-dep-depinfo
@@ -432,9 +432,9 @@ like to stabilize it somehow!
### keep-going
* Tracking Issue: [#10496](https://github.com/rust-lang/cargo/issues/10496)
-`cargo build --keep-going` (and similarly for `check`, `test` etc) will build as
-many crates in the dependency graph as possible, rather than aborting the build
-at the first one that fails to build.
+`cargo build --keep-going` (and similarly for every command involving compilation, like `check` and `doc`)
+will build as many crates in the dependency graph as possible,
+rather than aborting the build at the first one that fails to build.
For example if the current package depends on dependencies `fails` and `works`,
one of which fails to build, `cargo check -j1` may or may not build the one that
@@ -449,6 +449,16 @@ The `-Z unstable-options` command-line option must be used in order to use
cargo check --keep-going -Z unstable-options
```
+While `cargo test` and `cargo bench` commands involve compilation, they do not provide a `--keep-going` flag.
+Both commands already include a similar `--no-fail-fast` flag, which runs as many tests as possible without stopping at the first failure.
+To "compile" as many tests as possible, use target selection flags like `--tests` to build test binaries separately.
+For example,
+
+```console
+cargo build --tests --keep-going -Zunstable-options
+cargo test --tests --no-fail-fast
+```
+
### config-include
* Tracking Issue: [#7723](https://github.com/rust-lang/cargo/issues/7723)
@@ -1000,7 +1010,7 @@ It is intended for the rare use cases like "cryptographic proof that the central
Both fields can be set with `cargo login --registry=name --private-key --private-key-subject="subject"` which will prompt you to put in the key value.
-A registry can have at most one of `private-key`, `token`, or `credential-process` set.
+A registry can have at most one of `private-key` or `token` set.
All PASETOs will include `iat`, the current time in ISO 8601 format. Cargo will include the following where appropriate:
- `sub` an optional, non-secret string chosen by the registry that is expected to be claimed with every request. The value will be the `private-key-subject` from the `config.toml` file.
@@ -1026,144 +1036,248 @@ If a claim should be expected for the request but is missing in the PASETO then
The `credential-process` feature adds a config setting to fetch registry
authentication tokens by calling an external process.
-Token authentication is used by the [`cargo login`], [`cargo publish`],
-[`cargo owner`], [`cargo yank`], and [`cargo logout`] commands.
-
To use this feature, you must pass the `-Z credential-process` flag on the
-command-line. Additionally, you must remove any current tokens currently saved
-in the [`credentials.toml` file] (which can be done with the [`cargo logout`] command).
+command-line.
#### `credential-process` Configuration
To configure which process to run to fetch the token, specify the process in
-the `registry` table in a [config file]:
+the `registry` table in a [config file] with spaces separating arguments. If the
+path to the provider or its arguments contain spaces, then it must be defined in
+the `credential-alias` table and referenced instead.
```toml
[registry]
-credential-process = "/usr/bin/cargo-creds"
+global-credential-providers = ["/usr/bin/cargo-creds"]
```
-If you want to use a different process for a specific registry, it can be
+The provider at the end of the list will be attempted first. This ensures
+that when config files are merged, files closer to the project (and ultimately
+environment variables) have precedence.
+
+In this example, the `my-provider` provider will be attempted first, and if
+it cannot provide credentials, then the `cargo:token` provider will be used.
+
+```toml
+[registry]
+global-credential-providers = ['cargo:token', 'my-provider']
+```
+
+If you want to use a different provider for a specific registry, it can be
specified in the `registries` table:
```toml
[registries.my-registry]
-credential-process = "/usr/bin/cargo-creds"
+credential-provider = "/usr/bin/cargo-creds"
```
-The value can be a string with spaces separating arguments or it can be a TOML
-array of strings.
+The credential provider for crates.io can be specified as:
-Command-line arguments allow special placeholders which will be replaced with
-the corresponding value:
+```toml
+[registry]
+credential-provider = "/usr/bin/cargo-creds"
+```
-* `{name}` --- The name of the registry.
-* `{api_url}` --- The base URL of the registry API endpoints.
-* `{action}` --- The authentication action (described below).
+The value can be a string with spaces separating arguments or it can be a TOML
+array of strings.
-Process names with the prefix `cargo:` are loaded from the `libexec` directory
-next to cargo. Several experimental credential wrappers are included with
-Cargo, and this provides convenient access to them:
+For commonly-used providers, or providers whose path or arguments contain spaces,
+the `credential-alias` table can be used. These aliases can be referenced
+in `credential-provider` or `global-credential-providers`.
```toml
+[credential-alias]
+my-alias = ["/usr/bin/cargo-creds", "--argument"]
+
[registry]
-credential-process = "cargo:macos-keychain"
-```
-
-The current wrappers are:
-
-* `cargo:macos-keychain`: Uses the macOS Keychain to store the token.
-* `cargo:wincred`: Uses the Windows Credential Manager to store the token.
-* `cargo:1password`: Uses the 1password `op` CLI to store the token. You must
- install the `op` CLI from the [1password
- website](https://1password.com/downloads/command-line/). You must run `op
- signin` at least once with the appropriate arguments (such as `op signin
- my.1password.com user@example.com`), unless you provide the sign-in-address
- and email arguments. The master password will be required on each request
- unless the appropriate `OP_SESSION` environment variable is set. It supports
- the following command-line arguments:
- * `--account`: The account shorthand name to use.
- * `--vault`: The vault name to use.
- * `--sign-in-address`: The sign-in-address, which is a web address such as `my.1password.com`.
- * `--email`: The email address to sign in with.
-
-A wrapper is available for GNOME
-[libsecret](https://wiki.gnome.org/Projects/Libsecret) to store tokens on
-Linux systems. Due to build limitations, this wrapper is not available as a
-pre-compiled binary. This can be built and installed manually. First, install
-libsecret using your system package manager (for example, `sudo apt install
-libsecret-1-dev`). Then build and install the wrapper with `cargo install
-cargo-credential-gnome-secret`.
-In the config, use a path to the binary like this:
+global-credential-providers = ["cargo:token", "my-alias"]
+```
+#### Built-in providers
+
+Cargo now includes several built-in credential providers. These providers are
+executed within the Cargo process. They are identified with the `cargo:` prefix.
+
+* `cargo:token` - Uses Cargo's config and `credentials.toml` to store the token (default).
+* `cargo:wincred` - Uses the Windows Credential Manager to store the token.
+* `cargo:macos-keychain` - Uses the macOS Keychain to store the token.
+* `cargo:libsecret` - Uses [libsecret](https://wiki.gnome.org/Projects/Libsecret) to store tokens on Linux systems.
+* `cargo:token-from-stdout <command>` - Launches a subprocess that returns a token
+  on stdout (see the configuration sketch after this list). Newlines will be trimmed. The process inherits the user's stdin and stderr.
+ It should exit 0 on success, and nonzero on error.
+
+ With this form, [`cargo login`] and [`cargo logout`] are not supported and
+ return an error if used.
+
+ The following environment variables will be provided to the executed command:
+
+ * `CARGO` --- Path to the `cargo` binary executing the command.
+ * `CARGO_REGISTRY_INDEX_URL` --- The URL of the registry index.
+ * `CARGO_REGISTRY_NAME_OPT` --- Optional name of the registry. Should not be used as a storage key. Not always available.
+
+* `cargo:paseto` - Implements asymmetric token support (RFC3231) as a credential provider.
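+
+For example, a minimal sketch of configuring the `cargo:token-from-stdout` provider could look like the following, where `/usr/bin/my-token-script` is a hypothetical helper that prints a registry token to stdout:
+
+```toml
+[registry]
+global-credential-providers = ["cargo:token-from-stdout /usr/bin/my-token-script"]
+```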
+
+
+`cargo-credential-1password` uses the 1password `op` CLI to store the token. You must
+install the `op` CLI from the [1password
+website](https://1password.com/downloads/command-line/). You must run `op
+signin` at least once with the appropriate arguments (such as `op signin
+my.1password.com user@example.com`), unless you provide the sign-in-address
+and email arguments. The master password will be required on each request
+unless the appropriate `OP_SESSION` environment variable is set. It supports
+the following command-line arguments:
+* `--account`: The account shorthand name to use.
+* `--vault`: The vault name to use.
+* `--sign-in-address`: The sign-in-address, which is a web address such as `my.1password.com`.
+* `--email`: The email address to sign in with.
+
+Install the provider with `cargo install cargo-credential-1password`.
+In the config, add it to `global-credential-providers`:
```toml
[registry]
-credential-process = "cargo-credential-gnome-secret {action}"
+global-credential-providers = ["cargo-credential-1password"]
```
-#### `credential-process` Interface
-
-There are two different kinds of token processes that Cargo supports. The
-simple "basic" kind will only be called by Cargo when it needs a token. This
-is intended for simple and easy integration with password managers, that can
-often use pre-existing tooling. The more advanced "Cargo" kind supports
-different actions passed as a command-line argument. This is intended for more
-pleasant integration experience, at the expense of requiring a Cargo-specific
-process to glue to the password manager. Cargo will determine which kind is
-supported by the `credential-process` definition. If it contains the
-`{action}` argument, then it uses the advanced style, otherwise it assumes it
-only supports the "basic" kind.
-
-##### Basic authenticator
-
-A basic authenticator is a process that returns a token on stdout. Newlines
-will be trimmed. The process inherits the user's stdin and stderr. It should
-exit 0 on success, and nonzero on error.
-
-With this form, [`cargo login`] and [`cargo logout`] are not supported and
-return an error if used.
-
-##### Cargo authenticator
-
-The protocol between the Cargo and the process is very basic, intended to
-ensure the credential process is kept as simple as possible. Cargo will
-execute the process with the `{action}` argument indicating which action to
-perform:
+#### JSON Interface
+When using an external credential provider, Cargo communicates with the credential
+provider using stdin/stdout messages passed as a single line of JSON.
-* `store` --- Store the given token in secure storage.
-* `get` --- Get a token from storage.
-* `erase` --- Remove a token from storage.
+Cargo will always execute the credential provider with the `--cargo-plugin` argument.
+This enables a credential provider executable to have additional functionality beyond
+how Cargo uses it.
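+
+As a rough sketch (this is not the official credential-provider API, just an illustration), a provider binary could branch on that argument to decide whether to speak the JSON protocol:
+
+```rust,ignore
+fn main() {
+    // When invoked by Cargo, `--cargo-plugin` is passed on the command line;
+    // otherwise the binary runs its ordinary command-line interface.
+    if std::env::args().any(|arg| arg == "--cargo-plugin") {
+        // Read one JSON request per line from stdin and write one JSON
+        // response per line to stdout.
+    } else {
+        // Normal CLI behavior for direct invocation by a user.
+    }
+}
+```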
-The `cargo login` command uses `store` to save a token. Commands that require
-authentication, like `cargo publish`, uses `get` to retrieve a token. `cargo
-logout` uses the `erase` command to remove a token.
+The messages here have additional newlines added for readability.
+Actual messages must not contain newlines.
-The process inherits the user's stderr, so the process can display messages.
-Some values are passed in via environment variables (see below). The expected
-interactions are:
-
-* `store` --- The token is sent to the process's stdin, terminated by a newline.
- The process should store the token keyed off the registry name. If the
- process fails, it should exit with a nonzero exit status.
+##### Credential hello
+* Sent by: credential provider
+* Purpose: used to identify the supported protocols on process startup
+```javascript
+{
+ "v":[1]
+}
+```
-* `get` --- The process should send the token to its stdout (trailing newline
- will be trimmed). The process inherits the user's stdin, should it need to
- receive input.
+##### Login request
+* Sent by: Cargo
+* Purpose: collect and store credentials
+```javascript
+{
+ // Protocol version
+ "v":1,
+ // Action to perform: login
+ "kind":"login",
+ // Registry information
+ "registry":{"index-url":"sparse+https://registry-url/index/", "name": "my-registry"},
+}
+```
- If the process is unable to fulfill the request, it should exit with a
- nonzero exit code.
+##### Read request
+* Sent by: Cargo
+* Purpose: Get the credential for reading crate information
+```javascript
+{
+ // Protocol version
+ "v":1,
+ // Request kind: get credentials
+ "kind":"get",
+ // Action to perform: read crate information
+ "operation":"read",
+ // Registry information
+ "registry":{"index-url":"sparse+https://registry-url/index/", "name": "my-registry"},
+ // Additional command-line args
+ "args":[]
+}
+```
-* `erase` --- The process should remove the token associated with the registry
- name. If the token is not found, the process should exit with a 0 exit
- status.
+##### Publish request
+* Sent by: Cargo
+* Purpose: Get the credential for publishing a crate
+```javascript
+{
+ // Protocol version
+ "v":1,
+ // Request kind: get credentials
+ "kind":"get",
+ // Action to perform: publish crate
+ "operation":"publish",
+ // Crate name
+ "name":"sample",
+ // Crate version
+ "vers":"0.1.0",
+ // Crate checksum
+ "cksum":"...",
+ // Registry information
+ "registry":{"index-url":"sparse+https://registry-url/index/", "name": "my-registry"},
+ // Additional command-line args
+ "args":[]
+}
+```
-##### Environment
+##### Success response
+* Sent by: credential process
+* Purpose: Gives the credential to Cargo
+```javascript
+{"Ok":{
+ // Response kind: this was a get request kind
+ "kind":"get",
+ // Token to send to the registry
+ "token":"...",
+ // Cache control. Can be one of the following:
+ // * "never"
+ // * "session"
+ // * { "expires": UNIX timestamp }
+ "cache":{"expires":1684251794},
+ // Is the token operation independent?
+ "operation_independent":true
+}}
+```
-The following environment variables will be provided to the executed command:
+##### Failure response
+* Sent by: credential process
+* Purpose: Gives error information to Cargo
+```javascript
+{"Err":{
+ // Error: the credential provider does not support the
+ // registry
+ "kind":"url-not-supported",
+
+  // Error: The credential could not be found in the provider.
+  // For example, the user may not yet have logged in
+  // using `cargo login --registry ...`.
+ "kind":"not-found",
+
+ // Error: something else has failed
+ "kind":"other",
+ "detail": "free form string error message"
+}}
+```
-* `CARGO` --- Path to the `cargo` binary executing the command.
-* `CARGO_REGISTRY_INDEX_URL` --- The URL of the registry index.
-* `CARGO_REGISTRY_NAME_OPT` --- Optional name of the registry. Should not be used as a storage key. Not always available.
+##### Example communication to request a token for reading:
+1. Cargo spawns the credential process, capturing stdin and stdout.
+2. Credential process sends the Hello message to Cargo
+ ```javascript
+ { "v": [1] }
+ ```
+3. Cargo sends the CredentialRequest message to the credential process (newlines added for readability).
+ ```javascript
+ {
+ "v": 1,
+ "kind": "get",
+ "operation": "read",
+ "registry":{"index-url":"sparse+https://registry-url/index/", "name":"ado2"},
+ "args":[]
+ }
+ ```
+4. Credential process sends the CredentialResponse to Cargo (newlines added for readability).
+ ```javascript
+ {
+ "token": "...",
+ "cache": "session",
+ "operation_independent": false
+ }
+ ```
+5. Credential process exits
+6. Cargo uses the token for the remainder of the session (until Cargo exits) when interacting with this registry.
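
Putting the pieces together, the following is a minimal sketch of a provider's main loop, assuming a `serde_json` dependency; the hard-coded token, cache policy, and error handling are placeholders for illustration, not the official `cargo-credential` API:

```rust
use std::io::{self, BufRead, Write};

fn main() -> io::Result<()> {
    let stdin = io::stdin();
    let stdout = io::stdout();
    let mut out = stdout.lock();

    // Announce the supported protocol versions (the "Credential hello").
    writeln!(out, r#"{{"v":[1]}}"#)?;
    out.flush()?;

    // Each request from Cargo arrives as one line of JSON on stdin.
    for line in stdin.lock().lines() {
        let request: serde_json::Value =
            serde_json::from_str(&line?).expect("requests are single-line JSON");
        let response = match request["kind"].as_str() {
            // "get" covers both the read and publish operations.
            Some("get") => {
                r#"{"Ok":{"kind":"get","token":"...","cache":"session","operation_independent":true}}"#
            }
            // Anything this sketch does not handle is reported as an error.
            _ => r#"{"Err":{"kind":"other","detail":"unsupported request"}}"#,
        };
        writeln!(out, "{}", response)?;
        out.flush()?;
    }
    Ok(())
}
```

A real provider would look up or store an actual credential for the given `registry` and also answer `login` requests, but the framing is the same: a hello on startup, then one JSON request and one JSON response per line.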
[`cargo login`]: ../commands/cargo-login.md
[`cargo logout`]: ../commands/cargo-logout.md
diff --git a/src/tools/cargo/src/doc/src/reference/workspaces.md b/src/tools/cargo/src/doc/src/reference/workspaces.md
index 21f8f08f8..36a2e7323 100644
--- a/src/tools/cargo/src/doc/src/reference/workspaces.md
+++ b/src/tools/cargo/src/doc/src/reference/workspaces.md
@@ -56,7 +56,6 @@ version = "0.1.0" # the current version, obeying semver
authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
```
-<a id="virtual-manifest"></a>
#### Virtual workspace
Alternatively, a `Cargo.toml` file can be created with a `[workspace]` section
@@ -68,6 +67,7 @@ you want to keep all the packages organized in separate directories.
# [PROJECT_DIR]/Cargo.toml
[workspace]
members = ["hello_world"]
+resolver = "2"
```
```toml
@@ -75,9 +75,15 @@ members = ["hello_world"]
[package]
name = "hello_world" # the name of the package
version = "0.1.0" # the current version, obeying semver
+edition = "2021" # the edition, will have no effect on a resolver used in the workspace
authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
```
+Note that in a virtual manifest, [`resolver = "2"`](resolver.md#resolver-versions)
+should be specified manually. The resolver version is usually deduced from the
+[`package.edition`][package-edition] field, which is absent in virtual manifests,
+and the `edition` field of a member won't affect the resolver used by the workspace.
+
### The `members` and `exclude` fields
The `members` and `exclude` fields define which packages are members of
@@ -242,6 +248,7 @@ if that makes sense for the tool in question.
[package]: manifest.md#the-package-section
[`Cargo.lock`]: ../guide/cargo-toml-vs-cargo-lock.md
[package-metadata]: manifest.md#the-metadata-table
+[package-edition]: manifest.md#the-edition-field
[output directory]: ../guide/build-cache.md
[patch]: overriding-dependencies.md#the-patch-section
[replace]: overriding-dependencies.md#the-replace-section
@@ -253,3 +260,17 @@ if that makes sense for the tool in question.
[specifying-dependencies]: specifying-dependencies.md
[features]: features.md
[inheriting-a-dependency-from-a-workspace]: specifying-dependencies.md#inheriting-a-dependency-from-a-workspace
+
+<script>
+(function() {
+ var fragments = {
+ "#virtual-manifest": "workspaces.html#virtual-workspace",
+ };
+ var target = fragments[window.location.hash];
+ if (target) {
+ var url = window.location.toString();
+ var base = url.substring(0, url.lastIndexOf('/'));
+ window.location.replace(base + "/" + target);
+ }
+})();
+</script>
diff --git a/src/tools/cargo/src/etc/_cargo b/src/tools/cargo/src/etc/_cargo
index bdceb10c9..164d7679f 100644
--- a/src/tools/cargo/src/etc/_cargo
+++ b/src/tools/cargo/src/etc/_cargo
@@ -45,9 +45,13 @@ _cargo() {
'(--bench --bin --example --lib)--test=[specify test name]:test name'
)
- parallel=(
+ jobs=(
'(-j --jobs)'{-j+,--jobs=}'[specify number of parallel jobs]:jobs [# of CPUs]'
- '--keep-going[do not abort build on first error]'
+ )
+
+ parallel=(
+ "${jobs[@]}"
+ '--keep-going[do not abort build on first build error]'
)
features=(
@@ -87,7 +91,7 @@ _cargo() {
'*:args:_default'
;;
bench)
- _arguments -s -A "^--" $common $parallel $features $msgfmt $triple $target $manifest \
+ _arguments -s -A "^--" $common $jobs $features $msgfmt $triple $target $manifest \
"${command_scope_spec[@]}" \
'--all-targets[benchmark all targets]' \
"--no-run[compile but don't run]" \
@@ -297,7 +301,7 @@ _cargo() {
;;
test | t)
- _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
+ _arguments -s -S $common $jobs $features $msgfmt $triple $target $manifest \
'--test=[test name]: :_cargo_test_names' \
'--no-fail-fast[run all tests regardless of failure]' \
'--no-run[compile but do not run]' \
diff --git a/src/tools/cargo/src/etc/cargo.bashcomp.sh b/src/tools/cargo/src/etc/cargo.bashcomp.sh
index 2867ec56d..33f225ebf 100644
--- a/src/tools/cargo/src/etc/cargo.bashcomp.sh
+++ b/src/tools/cargo/src/etc/cargo.bashcomp.sh
@@ -41,7 +41,8 @@ _cargo()
local opt_pkg='-p --package'
local opt_feat='-F --features --all-features --no-default-features'
local opt_mani='--manifest-path'
- local opt_parallel='-j --jobs --keep-going'
+ local opt_jobs='-j --jobs'
+ local opt_parallel="$opt_jobs --keep-going"
local opt_force='-f --force'
local opt_sync='-s --sync'
local opt_lock='--frozen --locked --offline'
@@ -49,7 +50,7 @@ _cargo()
local opt___nocmd="$opt_common -V --version --list --explain"
local opt__add="$opt_common -p --package --features --default-features --no-default-features $opt_mani --optional --no-optional --rename --dry-run --path --git --branch --tag --rev --registry --dev --build --target"
- local opt__bench="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --target --no-run --no-fail-fast --target-dir --ignore-rust-version"
+ local opt__bench="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_jobs $opt_targets --message-format --target --no-run --no-fail-fast --target-dir --ignore-rust-version"
local opt__build="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --target --release --profile --target-dir --ignore-rust-version"
local opt__b="$opt__build"
local opt__check="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --target --release --profile --target-dir --ignore-rust-version"
@@ -82,7 +83,7 @@ _cargo()
local opt__rustc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets -L --crate-type --extern --message-format --profile --target --release --target-dir --ignore-rust-version"
local opt__rustdoc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --target --release --open --target-dir --profile --ignore-rust-version"
local opt__search="$opt_common $opt_lock --limit --index --registry"
- local opt__test="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_parallel $opt_targets --message-format --doc --target --no-run --release --no-fail-fast --target-dir --profile --ignore-rust-version"
+ local opt__test="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_jobs $opt_targets --message-format --doc --target --no-run --release --no-fail-fast --target-dir --profile --ignore-rust-version"
local opt__t="$opt__test"
local opt__tree="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock --target -i --invert --prefix --no-dedupe --duplicates -d --charset -f --format -e --edges"
local opt__uninstall="$opt_common $opt_lock $opt_pkg --bin --root"
diff --git a/src/tools/cargo/src/etc/man/cargo-metadata.1 b/src/tools/cargo/src/etc/man/cargo-metadata.1
index f2ebb63c7..95597d413 100644
--- a/src/tools/cargo/src/etc/man/cargo-metadata.1
+++ b/src/tools/cargo/src/etc/man/cargo-metadata.1
@@ -11,13 +11,37 @@ cargo\-metadata \[em] Machine\-readable metadata about the current package
Output JSON to stdout containing information about the workspace members and
resolved dependencies of the current package.
.sp
-It is recommended to include the \fB\-\-format\-version\fR flag to future\-proof
-your code to ensure the output is in the format you are expecting.
+The format of the output is subject to change in future versions of Cargo. It
+is recommended to include the \fB\-\-format\-version\fR flag to future\-proof your code
+to ensure the output is in the format you are expecting. For more on the
+expectations, see \[lq]Compatibility\[rq]\&.
.sp
See the \fIcargo_metadata crate\fR <https://crates.io/crates/cargo_metadata>
for a Rust API for reading the metadata.
.SH "OUTPUT FORMAT"
-The output has the following format:
+.SS "Compatibility"
+Within the same output format version, compatibility is maintained, except in
+some scenarios. The following is a non\-exhaustive list of changes that are not
+considered incompatible:
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBAdding new fields\fR \[em] New fields will be added when needed. Reserving this
+flexibility helps Cargo evolve without bumping the format version too often.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBAdding new values for enum\-like fields\fR \[em] Same as adding new fields. It
+keeps metadata evolving without stagnation.
+.RE
+.sp
+.RS 4
+\h'-04'\(bu\h'+02'\fBChanging opaque representations\fR \[em] The inner representations of some
+fields are implementation details. For example, fields related to \[lq]Package ID\[rq]
+or \[lq]Source ID\[rq] are treated as opaque identifiers to differentiate packages or
+sources. Consumers shouldn\[cq]t rely on those representations unless specified.
+.RE
+.SS "JSON format"
+The JSON output has the following format:
.sp
.RS 4
.nf
@@ -31,7 +55,9 @@ The output has the following format:
"name": "my\-package",
/* The version of the package. */
"version": "0.1.0",
- /* The Package ID, a unique identifier for referring to the package. */
+ /* The Package ID, an opaque and unique identifier for referring to the
+ package. See "Compatibility" above for the stability guarantee.
+ */
"id": "my\-package 0.1.0 (path+file:///path/to/my\-package)",
/* The license value from the manifest, or null. */
"license": "MIT/Apache\-2.0",
@@ -39,14 +65,25 @@ The output has the following format:
"license_file": "LICENSE",
/* The description value from the manifest, or null. */
"description": "Package description.",
- /* The source ID of the package. This represents where
- a package is retrieved from.
+ /* The source ID of the package, an "opaque" identifier representing
+ where a package is retrieved from. See "Compatibility" above for
+ the stability guarantee.
+
This is null for path dependencies and workspace members.
+
For other dependencies, it is a string with the format:
\- "registry+URL" for registry\-based dependencies.
Example: "registry+https://github.com/rust\-lang/crates.io\-index"
\- "git+URL" for git\-based dependencies.
Example: "git+https://github.com/rust\-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c"
+ \- "sparse+URL" for dependencies from a sparse registry
+ Example: "sparse+https://my\-sparse\-registry.org"
+
+ The value after the `+` is not explicitly defined, and may change
+ between versions of Cargo and may not directly correlate to other
+ things, such as registry definitions in a config file. New source
+ kinds may be added in the future which will have different `+`
+ prefixed identifiers.
*/
"source": null,
/* Array of dependencies declared in the package's manifest. */
diff --git a/src/tools/cargo/src/etc/man/cargo-test.1 b/src/tools/cargo/src/etc/man/cargo-test.1
index 802169815..4ca150dbc 100644
--- a/src/tools/cargo/src/etc/man/cargo-test.1
+++ b/src/tools/cargo/src/etc/man/cargo-test.1
@@ -535,13 +535,6 @@ a string \fBdefault\fR is provided, it sets the value back to defaults.
Should not be 0.
.RE
.sp
-\fB\-\-keep\-going\fR
-.RS 4
-Build as many crates in the dependency graph as possible, rather than aborting
-the build on the first one that fails to build. Unstable, requires
-\fB\-Zunstable\-options\fR\&.
-.RE
-.sp
\fB\-\-future\-incompat\-report\fR
.RS 4
Displays a future\-incompat report for any future\-incompatible warnings
diff --git a/src/tools/cargo/src/etc/man/cargo-yank.1 b/src/tools/cargo/src/etc/man/cargo-yank.1
index 107c59e49..2f46dd694 100644
--- a/src/tools/cargo/src/etc/man/cargo-yank.1
+++ b/src/tools/cargo/src/etc/man/cargo-yank.1
@@ -72,7 +72,7 @@ T}
T{
\fB2.0.0\fR
T}:T{
-Use either \fB1.5.0\fR, \fB1.5.1\fR or \fB0.22.2\fR
+Use either \fB1.5.0\fR, \fB1.5.1\fR or \fB1.5.2\fR
T}:T{
Use \fB1.5.0\fR
T}:T{
diff --git a/src/tools/cargo/tests/testsuite/alt_registry.rs b/src/tools/cargo/tests/testsuite/alt_registry.rs
index ac60ca92f..91157cd53 100644
--- a/src/tools/cargo/tests/testsuite/alt_registry.rs
+++ b/src/tools/cargo/tests/testsuite/alt_registry.rs
@@ -1506,3 +1506,44 @@ fn publish_with_transitive_dep() {
.build();
p2.cargo("publish").run();
}
+
+#[cargo_test]
+fn warn_for_unused_fields() {
+ let _ = RegistryBuilder::new()
+ .no_configure_token()
+ .alternative()
+ .build();
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ ".cargo/config.toml",
+ "[registry]
+ unexpected-field = 'foo'
+ [registries.alternative]
+ unexpected-field = 'foo'
+ ",
+ )
+ .build();
+
+ p.cargo("publish --registry alternative")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] `alternative` index
+[WARNING] unused config key `registries.alternative.unexpected-field` in `[..]config.toml`
+[ERROR] no token found for `alternative`, please run `cargo login --registry alternative`
+or use environment variable CARGO_REGISTRIES_ALTERNATIVE_TOKEN",
+ )
+ .run();
+
+ p.cargo("publish --registry crates-io")
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] crates.io index
+[WARNING] unused config key `registry.unexpected-field` in `[..]config.toml`
+[ERROR] no token found, please run `cargo login`
+or use environment variable CARGO_REGISTRY_TOKEN",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/bench.rs b/src/tools/cargo/tests/testsuite/bench.rs
index 581acbe15..d773308c6 100644
--- a/src/tools/cargo/tests/testsuite/bench.rs
+++ b/src/tools/cargo/tests/testsuite/bench.rs
@@ -314,9 +314,9 @@ fn cargo_bench_failing_test() {
[RUNNING] [..] (target/release/deps/foo-[..][EXE])",
)
.with_stdout_contains("[..]thread '[..]' panicked at[..]")
- .with_stdout_contains("[..]assertion failed[..]")
- .with_stdout_contains("[..]left: `\"hello\"`[..]")
- .with_stdout_contains("[..]right: `\"nope\"`[..]")
+ .with_stdout_contains("[..]assertion [..]failed[..]")
+ .with_stdout_contains("[..]left: [..]\"hello\"[..]")
+ .with_stdout_contains("[..]right: [..]\"nope\"[..]")
.with_stdout_contains("[..]src/main.rs:15[..]")
.with_status(101)
.run();
@@ -1670,3 +1670,58 @@ fn json_artifact_includes_executable_for_benchmark() {
)
.run();
}
+
+#[cargo_test]
+fn cargo_bench_no_keep_going() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("bench --keep-going")
+ .with_stderr(
+ "\
+error: unexpected argument `--keep-going` found
+
+ tip: to run as many benchmarks as possible without failing fast, use `--no-fail-fast`",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test(nightly, reason = "bench")]
+fn cargo_bench_print_env_verbose() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.0.1"))
+ .file(
+ "src/main.rs",
+ r#"
+ #![feature(test)]
+ #[cfg(test)]
+ extern crate test;
+
+ fn hello() -> &'static str {
+ "hello"
+ }
+
+ pub fn main() {
+ println!("{}", hello())
+ }
+
+ #[bench]
+ fn bench_hello(_b: &mut test::Bencher) {
+ assert_eq!(hello(), "hello")
+ }
+ "#,
+ )
+ .build();
+ p.cargo("bench -vv")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `[..]CARGO_MANIFEST_DIR=[CWD][..] rustc[..]`
+[FINISHED] bench [optimized] target(s) in [..]
+[RUNNING] `[..]CARGO_MANIFEST_DIR=[CWD][..] [CWD]/target/release/deps/foo-[..][EXE] --bench`",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/build_script.rs b/src/tools/cargo/tests/testsuite/build_script.rs
index 4840356c6..400d10547 100644
--- a/src/tools/cargo/tests/testsuite/build_script.rs
+++ b/src/tools/cargo/tests/testsuite/build_script.rs
@@ -5141,6 +5141,35 @@ for more information about build script outputs.
}
#[cargo_test]
+fn wrong_syntax_with_two_colons() {
+ let p = project()
+ .file("src/lib.rs", "")
+ .file(
+ "build.rs",
+ r#"
+ fn main() {
+ println!("cargo::foo=bar");
+ }
+ "#,
+ )
+ .build();
+
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr(
+ "\
+[COMPILING] foo [..]
+error: unsupported output in build script of `foo v0.0.1 ([ROOT]/foo)`: `cargo::foo=bar`
+Found a `cargo::key=value` build directive which is reserved for future use.
+Either change the directive to `cargo:key=value` syntax (note the single `:`) or upgrade your version of Rust.
+See https://doc.rust-lang.org/cargo/reference/build-scripts.html#outputs-of-the-build-script \
+for more information about build script outputs.
+",
+ )
+ .run();
+}
+
+#[cargo_test]
fn custom_build_closes_stdin() {
// Ensure stdin is closed to prevent deadlock.
// See https://github.com/rust-lang/cargo/issues/11196
diff --git a/src/tools/cargo/tests/testsuite/build_script_env.rs b/src/tools/cargo/tests/testsuite/build_script_env.rs
index bc87b7120..df574600c 100644
--- a/src/tools/cargo/tests/testsuite/build_script_env.rs
+++ b/src/tools/cargo/tests/testsuite/build_script_env.rs
@@ -117,7 +117,10 @@ fn rustc_bootstrap() {
"#;
let p = project()
.file("Cargo.toml", &basic_manifest("has-dashes", "0.0.1"))
- .file("src/lib.rs", "#![feature(rustc_attrs)]")
+ .file(
+ "src/lib.rs",
+ "#![allow(internal_features)] #![feature(rustc_attrs)]",
+ )
.file("build.rs", build_rs)
.build();
// RUSTC_BOOTSTRAP unset on stable should error
@@ -154,7 +157,10 @@ fn rustc_bootstrap() {
// Tests for binaries instead of libraries
let p = project()
.file("Cargo.toml", &basic_manifest("foo", "0.0.1"))
- .file("src/main.rs", "#![feature(rustc_attrs)] fn main() {}")
+ .file(
+ "src/main.rs",
+ "#![allow(internal_features)] #![feature(rustc_attrs)] fn main() {}",
+ )
.file("build.rs", build_rs)
.build();
// nightly should warn when there's no library whether or not RUSTC_BOOTSTRAP is set
diff --git a/src/tools/cargo/tests/testsuite/cargo/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo/help/mod.rs
new file mode 100644
index 000000000..559377b27
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo/help/mod.rs
@@ -0,0 +1,12 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/auto_git/stdout.log b/src/tools/cargo/tests/testsuite/cargo/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/auto_git/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo/help/stdout.log
new file mode 100644
index 000000000..26bcd745b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo/help/stdout.log
@@ -0,0 +1,39 @@
+Rust's package manager
+
+Usage: cargo [..][OPTIONS] [COMMAND]
+ cargo [..][OPTIONS] -Zscript <MANIFEST_RS> [ARGS]...
+
+Options:
+ -V, --version Print version info and exit
+ --list List installed commands
+ --explain <CODE> Run `rustc --explain CODE`
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ -q, --quiet Do not print cargo log messages
+ --color <WHEN> Coloring: auto, always, never
+ -C <DIRECTORY> Change to DIRECTORY before doing anything (nightly-only)
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Some common cargo commands are (see all commands with --list):
+ build, b Compile the current package
+ check, c Analyze the current package and report errors, but don't build object files
+ clean Remove the target directory
+ doc, d Build this package's and its dependencies' documentation
+ new Create a new cargo package
+ init Create a new cargo package in an existing directory
+ add Add dependencies to a manifest file
+ remove Remove dependencies from a manifest file
+ run, r Run a binary or example of the local package
+ test, t Run the tests
+ bench Run the benchmarks
+ update Update dependencies listed in Cargo.lock
+ search Search registry for crates
+ publish Package and upload this package to the registry
+ install Install a Rust binary. Default location is $HOME/.cargo/bin
+ uninstall Uninstall a Rust binary
+
+See 'cargo help <command>' for more information on a specific command.
diff --git a/src/tools/cargo/tests/testsuite/cargo/mod.rs b/src/tools/cargo/tests/testsuite/cargo/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/help/mod.rs
new file mode 100644
index 000000000..0962047f8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("add")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_add/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_add/help/stdout.log
new file mode 100644
index 000000000..0daba1a94
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_add/help/stdout.log
@@ -0,0 +1,124 @@
+Add dependencies to a Cargo.toml manifest file
+
+Usage: cargo add [OPTIONS] <DEP>[@<VERSION>] ...
+ cargo add [OPTIONS] --path <PATH> ...
+ cargo add [OPTIONS] --git <URL> ...
+
+Arguments:
+ [DEP_ID]...
+ Reference to a package to add as a dependency
+
+ You can reference a package by:
+ - `<name>`, like `cargo add serde` (latest version will be used)
+ - `<name>@<version-req>`, like `cargo add serde@1` or `cargo add serde@=1.0.38`
+
+Options:
+ --no-default-features
+ Disable the default features
+
+ --default-features
+ Re-enable the default features
+
+ -F, --features <FEATURES>
+ Space or comma separated list of features to activate
+
+ --optional
+ Mark the dependency as optional
+
+ The package name will be exposed as feature of your crate.
+
+ --no-optional
+ Mark the dependency as required
+
+ The package will be removed from your features.
+
+ --rename <NAME>
+ Rename the dependency
+
+ Example uses:
+ - Depending on multiple versions of a crate
+ - Depend on crates with the same name from different registries
+
+ --ignore-rust-version
+ Ignore `rust-version` specification in packages (unstable)
+
+ --dry-run
+ Don't actually write the manifest
+
+ -q, --quiet
+ Do not print cargo log messages
+
+ -v, --verbose...
+ Use verbose output (-vv very verbose/build.rs output)
+
+ --color <WHEN>
+ Coloring: auto, always, never
+
+ --config <KEY=VALUE>
+ Override a configuration value
+
+ -Z <FLAG>
+ Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+
+ -h, --help
+ Print help (see a summary with '-h')
+
+Manifest Options:
+ --manifest-path <PATH>
+ Path to Cargo.toml
+
+ --frozen
+ Require Cargo.lock and cache are up to date
+
+ --locked
+ Require Cargo.lock is up to date
+
+ --offline
+ Run without accessing the network
+
+Package Selection:
+ -p, --package [<SPEC>]
+ Package to modify
+
+Source:
+ --path <PATH>
+ Filesystem path to local crate to add
+
+ --git <URI>
+ Git repository location
+
+ Without any other information, cargo will use latest commit on the main branch.
+
+ --branch <BRANCH>
+ Git branch to download the crate from
+
+ --tag <TAG>
+ Git tag to download the crate from
+
+ --rev <REV>
+ Git reference to download the crate from
+
+ This is the catch all, handling hashes to named references in remote repositories.
+
+ --registry <NAME>
+ Package registry for this dependency
+
+Section:
+ --dev
+ Add as development dependency
+
+ Dev-dependencies are not used when compiling a package for building, but are used for
+ compiling tests, examples, and benchmarks.
+
+ These dependencies are not propagated to other packages which depend on this package.
+
+ --build
+ Add as build dependency
+
+ Build-dependencies are the only dependencies available for use by build scripts
+ (`build.rs` files).
+
+ --target <TARGET>
+ Add as dependency to the given target platform
+
+Run `cargo help add` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_add/mod.rs b/src/tools/cargo/tests/testsuite/cargo_add/mod.rs
index be7a1546b..de93afbc1 100644
--- a/src/tools/cargo/tests/testsuite/cargo_add/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_add/mod.rs
@@ -35,6 +35,7 @@ mod git_normalized_name;
mod git_registry;
mod git_rev;
mod git_tag;
+mod help;
mod infer_prerelease;
mod invalid_arg;
mod invalid_git_name;
diff --git a/src/tools/cargo/tests/testsuite/cargo_bench/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_bench/help/mod.rs
new file mode 100644
index 000000000..9338664e5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_bench/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("bench")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stdout.log b/src/tools/cargo/tests/testsuite/cargo_bench/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_bench/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_bench/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_bench/help/stdout.log
new file mode 100644
index 000000000..5d9484df9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_bench/help/stdout.log
@@ -0,0 +1,59 @@
+Execute all benchmarks of a local package
+
+Usage: cargo[EXE] bench [OPTIONS] [BENCHNAME] [-- [args]...]
+
+Arguments:
+ [BENCHNAME] If specified, only run benches containing this string in their names
+ [args]... Arguments for the bench binary
+
+Options:
+ --no-run Compile, but don't run benchmarks
+ --no-fail-fast Run all benchmarks regardless of failure
+ --ignore-rust-version Ignore `rust-version` specification in packages
+ --message-format <FMT> Error format
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
+ details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package to run benchmarks for
+ --workspace Benchmark all packages in the workspace
+ --exclude <SPEC> Exclude packages from the benchmark
+ --all Alias for --workspace (deprecated)
+
+Target Selection:
+ --lib Benchmark only this package's library
+ --bins Benchmark all binaries
+ --bin [<NAME>] Benchmark only the specified binary
+ --examples Benchmark all examples
+ --example [<NAME>] Benchmark only the specified example
+ --tests Benchmark all tests
+ --test [<NAME>] Benchmark only the specified test target
+ --benches Benchmark all benches
+ --bench [<NAME>] Benchmark only the specified bench target
+ --all-targets Benchmark all targets
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Compilation Options:
+ -j, --jobs <N> Number of parallel jobs, defaults to # of CPUs.
+ --profile <PROFILE-NAME> Build artifacts with the specified profile
+ --target <TRIPLE> Build for the target triple
+ --target-dir <DIRECTORY> Directory for all generated artifacts
+ --unit-graph Output build graph in JSON (unstable)
+ --timings[=<FMTS>] Timing output formats (unstable) (comma separated): html, json
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help bench` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_bench/mod.rs b/src/tools/cargo/tests/testsuite/cargo_bench/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_bench/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_build/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_build/help/mod.rs
new file mode 100644
index 000000000..9ca23b478
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_build/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("build")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stdout.log b/src/tools/cargo/tests/testsuite/cargo_build/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_build/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_build/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_build/help/stdout.log
new file mode 100644
index 000000000..af906c24f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_build/help/stdout.log
@@ -0,0 +1,58 @@
+Compile a local package and all of its dependencies
+
+Usage: cargo[EXE] build [OPTIONS]
+
+Options:
+ --ignore-rust-version Ignore `rust-version` specification in packages
+ --future-incompat-report Outputs a future incompatibility report at the end of the build
+ --message-format <FMT> Error format
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
+ details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package to build (see `cargo help pkgid`)
+ --workspace Build all packages in the workspace
+ --exclude <SPEC> Exclude packages from the build
+ --all Alias for --workspace (deprecated)
+
+Target Selection:
+ --lib Build only this package's library
+ --bins Build all binaries
+ --bin [<NAME>] Build only the specified binary
+ --examples Build all examples
+ --example [<NAME>] Build only the specified example
+ --tests Build all tests
+ --test [<NAME>] Build only the specified test target
+ --benches Build all benches
+ --bench [<NAME>] Build only the specified bench target
+ --all-targets Build all targets
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Compilation Options:
+ -r, --release Build artifacts in release mode, with optimizations
+ --profile <PROFILE-NAME> Build artifacts with the specified profile
+ -j, --jobs <N> Number of parallel jobs, defaults to # of CPUs.
+ --keep-going Do not abort the build as soon as there is an error (unstable)
+ --target <TRIPLE> Build for the target triple
+ --target-dir <DIRECTORY> Directory for all generated artifacts
+ --out-dir <PATH> Copy final artifacts to this directory (unstable)
+ --build-plan Output the build plan in JSON (unstable)
+ --unit-graph Output build graph in JSON (unstable)
+ --timings[=<FMTS>] Timing output formats (unstable) (comma separated): html, json
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help build` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_build/mod.rs b/src/tools/cargo/tests/testsuite/cargo_build/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_build/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_check/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_check/help/mod.rs
new file mode 100644
index 000000000..71571bc95
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_check/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("check")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stdout.log b/src/tools/cargo/tests/testsuite/cargo_check/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_check/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_check/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_check/help/stdout.log
new file mode 100644
index 000000000..7c87615cd
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_check/help/stdout.log
@@ -0,0 +1,56 @@
+Check a local package and all of its dependencies for errors
+
+Usage: cargo[EXE] check [OPTIONS]
+
+Options:
+ --ignore-rust-version Ignore `rust-version` specification in packages
+ --future-incompat-report Outputs a future incompatibility report at the end of the build
+ --message-format <FMT> Error format
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
+ details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package(s) to check
+ --workspace Check all packages in the workspace
+ --exclude <SPEC> Exclude packages from the check
+ --all Alias for --workspace (deprecated)
+
+Target Selection:
+ --lib Check only this package's library
+ --bins Check all binaries
+ --bin [<NAME>] Check only the specified binary
+ --examples Check all examples
+ --example [<NAME>] Check only the specified example
+ --tests Check all tests
+ --test [<NAME>] Check only the specified test target
+ --benches Check all benches
+ --bench [<NAME>] Check only the specified bench target
+ --all-targets Check all targets
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Compilation Options:
+ -j, --jobs <N> Number of parallel jobs, defaults to # of CPUs.
+ --keep-going Do not abort the build as soon as there is an error (unstable)
+ -r, --release Check artifacts in release mode, with optimizations
+ --profile <PROFILE-NAME> Check artifacts with the specified profile
+ --target <TRIPLE> Check for the target triple
+ --target-dir <DIRECTORY> Directory for all generated artifacts
+ --unit-graph Output build graph in JSON (unstable)
+ --timings[=<FMTS>] Timing output formats (unstable) (comma separated): html, json
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help check` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_check/mod.rs b/src/tools/cargo/tests/testsuite/cargo_check/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_check/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_clean/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_clean/help/mod.rs
new file mode 100644
index 000000000..7225292b8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_clean/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("clean")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stdout.log b/src/tools/cargo/tests/testsuite/cargo_clean/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_clean/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_clean/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_clean/help/stdout.log
new file mode 100644
index 000000000..fd3c8855c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_clean/help/stdout.log
@@ -0,0 +1,29 @@
+Remove artifacts that cargo has generated in the past
+
+Usage: cargo[EXE] clean [OPTIONS]
+
+Options:
+ --doc Whether or not to clean just the documentation directory
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package to clean artifacts for
+
+Compilation Options:
+ -r, --release Whether or not to clean release artifacts
+ --profile <PROFILE-NAME> Clean artifacts of the specified profile
+ --target <TRIPLE> Target triple to clean output for
+ --target-dir <DIRECTORY> Directory for all generated artifacts
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help clean` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_clean/mod.rs b/src/tools/cargo/tests/testsuite/cargo_clean/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_clean/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_config/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_config/help/mod.rs
new file mode 100644
index 000000000..070238ef0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_config/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("config")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stdout.log b/src/tools/cargo/tests/testsuite/cargo_config/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_config/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_config/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_config/help/stdout.log
new file mode 100644
index 000000000..50caca72a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_config/help/stdout.log
@@ -0,0 +1,18 @@
+Inspect configuration values
+
+Usage: cargo[EXE] config [OPTIONS] <COMMAND>
+
+Commands:
+ get
+
+Options:
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
diff --git a/src/tools/cargo/tests/testsuite/cargo_config.rs b/src/tools/cargo/tests/testsuite/cargo_config/mod.rs
index e367f8e06..dc0a40ed8 100644
--- a/src/tools/cargo/tests/testsuite/cargo_config.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_config/mod.rs
@@ -5,6 +5,8 @@ use cargo_test_support::paths;
use std::fs;
use std::path::PathBuf;
+mod help;
+
fn cargo_process(s: &str) -> cargo_test_support::Execs {
let mut p = cargo_test_support::cargo_process(s);
// Clear out some of the environment added by the default cargo_process so
diff --git a/src/tools/cargo/tests/testsuite/cargo_doc/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_doc/help/mod.rs
new file mode 100644
index 000000000..b0fd4f3e8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_doc/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("doc")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stdout.log b/src/tools/cargo/tests/testsuite/cargo_doc/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_doc/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_doc/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_doc/help/stdout.log
new file mode 100644
index 000000000..480e189c1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_doc/help/stdout.log
@@ -0,0 +1,53 @@
+Build a package's documentation
+
+Usage: cargo[EXE] doc [OPTIONS]
+
+Options:
+ --open Opens the docs in a browser after the operation
+ --no-deps Don't build documentation for dependencies
+ --document-private-items Document private items
+ --ignore-rust-version Ignore `rust-version` specification in packages
+ --message-format <FMT> Error format
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
+ details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package to document
+ --workspace Document all packages in the workspace
+ --exclude <SPEC> Exclude packages from the build
+ --all Alias for --workspace (deprecated)
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Target Selection:
+ --lib Document only this package's library
+ --bins Document all binaries
+ --bin [<NAME>] Document only the specified binary
+ --examples Document all examples
+ --example [<NAME>] Document only the specified example
+
+Compilation Options:
+ -j, --jobs <N> Number of parallel jobs, defaults to # of CPUs.
+ --keep-going Do not abort the build as soon as there is an error (unstable)
+ -r, --release Build artifacts in release mode, with optimizations
+ --profile <PROFILE-NAME> Build artifacts with the specified profile
+ --target <TRIPLE> Build for the target triple
+ --target-dir <DIRECTORY> Directory for all generated artifacts
+ --unit-graph Output build graph in JSON (unstable)
+ --timings[=<FMTS>] Timing output formats (unstable) (comma separated): html, json
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help doc` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_doc/mod.rs b/src/tools/cargo/tests/testsuite/cargo_doc/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_doc/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_fetch/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_fetch/help/mod.rs
new file mode 100644
index 000000000..79025bc32
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_fetch/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("fetch")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stdout.log b/src/tools/cargo/tests/testsuite/cargo_fetch/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_fetch/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_fetch/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_fetch/help/stdout.log
new file mode 100644
index 000000000..b9bd6c35b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_fetch/help/stdout.log
@@ -0,0 +1,22 @@
+Fetch dependencies of a package from the network
+
+Usage: cargo[EXE] fetch [OPTIONS]
+
+Options:
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Compilation Options:
+ --target <TRIPLE> Fetch dependencies for the target triple
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help fetch` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_fetch/mod.rs b/src/tools/cargo/tests/testsuite/cargo_fetch/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_fetch/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_fix/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_fix/help/mod.rs
new file mode 100644
index 000000000..2c67e1556
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_fix/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("fix")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stdout.log b/src/tools/cargo/tests/testsuite/cargo_fix/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_fix/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_fix/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_fix/help/stdout.log
new file mode 100644
index 000000000..c0a98218a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_fix/help/stdout.log
@@ -0,0 +1,60 @@
+Automatically fix lint warnings reported by rustc
+
+Usage: cargo[EXE] fix [OPTIONS]
+
+Options:
+ --edition Fix in preparation for the next edition
+ --edition-idioms Fix warnings to migrate to the idioms of an edition
+ --broken-code Fix code even if it already has compiler errors
+ --allow-no-vcs Fix code even if a VCS was not detected
+ --allow-dirty Fix code even if the working directory is dirty
+ --allow-staged Fix code even if the working directory has staged changes
+ --ignore-rust-version Ignore `rust-version` specification in packages
+ --message-format <FMT> Error format
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
+ details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package(s) to fix
+ --workspace Fix all packages in the workspace
+ --exclude <SPEC> Exclude packages from the fixes
+ --all Alias for --workspace (deprecated)
+
+Target Selection:
+ --lib Fix only this package's library
+ --bins Fix all binaries
+ --bin [<NAME>] Fix only the specified binary
+ --examples Fix all examples
+ --example [<NAME>] Fix only the specified example
+ --tests Fix all tests
+ --test [<NAME>] Fix only the specified test target
+ --benches Fix all benches
+ --bench [<NAME>] Fix only the specified bench target
+ --all-targets Fix all targets (default)
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Compilation Options:
+ -j, --jobs <N> Number of parallel jobs, defaults to # of CPUs.
+ --keep-going Do not abort the build as soon as there is an error (unstable)
+ -r, --release Fix artifacts in release mode, with optimizations
+ --profile <PROFILE-NAME> Build artifacts with the specified profile
+ --target <TRIPLE> Fix for the target triple
+ --target-dir <DIRECTORY> Directory for all generated artifacts
+ --timings[=<FMTS>] Timing output formats (unstable) (comma separated): html, json
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help fix` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_fix/mod.rs b/src/tools/cargo/tests/testsuite/cargo_fix/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_fix/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/mod.rs
new file mode 100644
index 000000000..0408ce06b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("generate-lockfile")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stdout.log b/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/stdout.log
new file mode 100644
index 000000000..07eff888a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/help/stdout.log
@@ -0,0 +1,19 @@
+Generate the lockfile for a package
+
+Usage: cargo[EXE] generate-lockfile [OPTIONS]
+
+Options:
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help generate-lockfile` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/mod.rs b/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_generate_lockfile/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_git_checkout/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_git_checkout/help/mod.rs
new file mode 100644
index 000000000..5ff877fbb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_git_checkout/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("git-checkout")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stdout.log b/src/tools/cargo/tests/testsuite/cargo_git_checkout/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_git_checkout/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_git_checkout/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_git_checkout/help/stdout.log
new file mode 100644
index 000000000..675090fd3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_git_checkout/help/stdout.log
@@ -0,0 +1 @@
+The `git-checkout` command has been removed.
diff --git a/src/tools/cargo/tests/testsuite/cargo_git_checkout/mod.rs b/src/tools/cargo/tests/testsuite/cargo_git_checkout/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_git_checkout/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_help/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_help/help/mod.rs
new file mode 100644
index 000000000..af445cda1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_help/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("help")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stdout.log b/src/tools/cargo/tests/testsuite/cargo_help/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_help/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_help/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_help/help/stdout.log
new file mode 100644
index 000000000..a03946b45
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_help/help/stdout.log
@@ -0,0 +1,18 @@
+Displays help for a cargo subcommand
+
+Usage: cargo[EXE] help [OPTIONS] [COMMAND]
+
+Arguments:
+ [COMMAND]
+
+Options:
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
diff --git a/src/tools/cargo/tests/testsuite/cargo_help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_help/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_help/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/init/auto_git/in b/src/tools/cargo/tests/testsuite/cargo_init/auto_git/in
index 1202506b6..1202506b6 120000
--- a/src/tools/cargo/tests/testsuite/init/auto_git/in
+++ b/src/tools/cargo/tests/testsuite/cargo_init/auto_git/in
diff --git a/src/tools/cargo/tests/testsuite/init/auto_git/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/auto_git/mod.rs
index 68c217520..68c217520 100644
--- a/src/tools/cargo/tests/testsuite/init/auto_git/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/auto_git/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/auto_git/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/auto_git/out/Cargo.toml
index dcdb8da2c..dcdb8da2c 100644
--- a/src/tools/cargo/tests/testsuite/init/auto_git/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/auto_git/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/auto_git/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/auto_git/out/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/auto_git/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/auto_git/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/auto_git/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/auto_git/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/auto_git/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/auto_git/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/auto_git/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/auto_git/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/in/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/in/src/main.rs
index 65fdcf8da..65fdcf8da 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/in/src/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/in/src/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/mod.rs
index 326bd218a..326bd218a 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/out/Cargo.toml
index dcdb8da2c..dcdb8da2c 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/out/src/main.rs
index 65fdcf8da..65fdcf8da 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/out/src/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/out/src/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/stderr.log
index 3847e4e4a..3847e4e4a 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/formats_source/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/in/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/in/main.rs
index 65fdcf8da..65fdcf8da 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/in/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/in/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/mod.rs
index 1f16fb659..1f16fb659 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/out/Cargo.toml
index 5c6c9158c..5c6c9158c 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/out/main.rs
index 65fdcf8da..65fdcf8da 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/out/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/out/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/stderr.log
index 3847e4e4a..3847e4e4a 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_explicit_nosrc/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_explicit_nosrc/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/in/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/in/src/main.rs
index 65fdcf8da..65fdcf8da 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/in/src/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/in/src/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/mod.rs
index 12349a09b..12349a09b 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/out/Cargo.toml
index dcdb8da2c..dcdb8da2c 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/out/src/main.rs
index 65fdcf8da..65fdcf8da 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/out/src/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/out/src/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/stderr.log
index 3847e4e4a..3847e4e4a 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/git_autodetect/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/git_autodetect/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/in/case.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/in/case.rs
index 65fdcf8da..65fdcf8da 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/in/case.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/in/case.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/mod.rs
index fe65940db..fe65940db 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/out/Cargo.toml
index 8da5fe778..8da5fe778 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/case.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/out/case.rs
index 65fdcf8da..65fdcf8da 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/out/case.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/out/case.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/stderr.log
index 3847e4e4a..3847e4e4a 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namenosrc/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namenosrc/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/in/src/case.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/in/src/case.rs
index 65fdcf8da..65fdcf8da 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/in/src/case.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/in/src/case.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/mod.rs
index d3e8e66df..d3e8e66df 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/out/Cargo.toml
index dec0aaea9..dec0aaea9 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/src/case.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/out/src/case.rs
index 65fdcf8da..65fdcf8da 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/out/src/case.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/out/src/case.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/stderr.log
index 3847e4e4a..3847e4e4a 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_namesrc/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_namesrc/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/in/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/in/main.rs
index 65fdcf8da..65fdcf8da 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/in/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/in/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/mod.rs
index fe65940db..fe65940db 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/out/Cargo.toml
index 5c6c9158c..5c6c9158c 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/out/main.rs
index 65fdcf8da..65fdcf8da 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/out/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/out/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/stderr.log
index 3847e4e4a..3847e4e4a 100644
--- a/src/tools/cargo/tests/testsuite/init/bin_already_exists_implicit_nosrc/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/bin_already_exists_implicit_nosrc/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/both_lib_and_bin/mod.rs
index c9232320a..c9232320a 100644
--- a/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/both_lib_and_bin/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/both_lib_and_bin/stderr.log
index 9d635a427..9d635a427 100644
--- a/src/tools/cargo/tests/testsuite/init/both_lib_and_bin/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/both_lib_and_bin/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/both_lib_and_bin/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/both_lib_and_bin/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/case.rs b/src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/in/case.rs
index f328e4d9d..f328e4d9d 100644
--- a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/case.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/in/case.rs
diff --git a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/in/lib.rs
index 59760b549..59760b549 100644
--- a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/in/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/in/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/mod.rs
index 5e9e1b94c..5e9e1b94c 100644
--- a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/stderr.log
index c08dce96b..c08dce96b 100644
--- a/src/tools/cargo/tests/testsuite/init/cant_create_library_when_both_binlib_present/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/cant_create_library_when_both_binlib_present/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/in/lib.rs
index 321163744..321163744 100644
--- a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/in/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/in/src/lib.rs
index f71455a1a..f71455a1a 100644
--- a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/in/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/mod.rs
index d1cba2ff7..d1cba2ff7 100644
--- a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/out/lib.rs
index 321163744..321163744 100644
--- a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/out/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/out/src/lib.rs
index f71455a1a..f71455a1a 100644
--- a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/stderr.log
index 8dbd2aaf0..8dbd2aaf0 100644
--- a/src/tools/cargo/tests/testsuite/init/confused_by_multiple_lib_files/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/invalid_dir_name/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/invalid_dir_name/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/confused_by_multiple_lib_files/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/case.rs b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/in/case.rs
index f328e4d9d..f328e4d9d 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/case.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/in/case.rs
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/in/lib.rs
index 59760b549..59760b549 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/in/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/in/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/mod.rs
index 326bd218a..326bd218a 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/out/Cargo.toml
index 675c888a5..675c888a5 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/case.rs b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/out/case.rs
index f328e4d9d..f328e4d9d 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/case.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/out/case.rs
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/out/lib.rs
index 59760b549..59760b549 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/out/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/out/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/stderr.log
index 3847e4e4a..3847e4e4a 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_both_binlib_present/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_both_binlib_present/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/in/case.rs b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/in/case.rs
index 59760b549..59760b549 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/in/case.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/in/case.rs
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/mod.rs
index 326bd218a..326bd218a 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/out/Cargo.toml
index 8da5fe778..8da5fe778 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/case.rs b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/out/case.rs
index 59760b549..59760b549 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/out/case.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/out/case.rs
diff --git a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/stderr.log
index ec428f31c..ec428f31c 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_binary_when_instructed_and_has_lib_file/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_binary_when_instructed_and_has_lib_file/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/in/case.rs b/src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/in/case.rs
index f328e4d9d..f328e4d9d 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/in/case.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/in/case.rs
diff --git a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/mod.rs
index 59c192cb9..59c192cb9 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/out/Cargo.toml
index 2c0464468..2c0464468 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/case.rs b/src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/out/case.rs
index f328e4d9d..f328e4d9d 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/out/case.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/out/case.rs
diff --git a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/stderr.log
index bf070e2da..bf070e2da 100644
--- a/src/tools/cargo/tests/testsuite/init/creates_library_when_instructed_and_has_bin_file/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/creates_library_when_instructed_and_has_bin_file/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/empty_dir/.keep b/src/tools/cargo/tests/testsuite/cargo_init/empty_dir/.keep
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/empty_dir/.keep
+++ b/src/tools/cargo/tests/testsuite/cargo_init/empty_dir/.keep
diff --git a/src/tools/cargo/tests/testsuite/init/empty_dir/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/empty_dir/mod.rs
index 074954f01..074954f01 100644
--- a/src/tools/cargo/tests/testsuite/init/empty_dir/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/empty_dir/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/in b/src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/in
index 1202506b6..1202506b6 120000
--- a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/in
+++ b/src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/in
diff --git a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/mod.rs
index 7314e955c..7314e955c 100644
--- a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/out/Cargo.toml
index dcdb8da2c..dcdb8da2c 100644
--- a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/out/src/main.rs
index e7a11a969..e7a11a969 100644
--- a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/out/src/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/out/src/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/stderr.log
index 3847e4e4a..3847e4e4a 100644
--- a/src/tools/cargo/tests/testsuite/init/explicit_bin_with_git/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/explicit_bin_with_git/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/in/rustfmt.toml b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/in/rustfmt.toml
index b196eaa2d..b196eaa2d 100644
--- a/src/tools/cargo/tests/testsuite/init/formats_source/in/rustfmt.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/in/rustfmt.toml
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/mod.rs
index ac1fb6271..ac1fb6271 100644
--- a/src/tools/cargo/tests/testsuite/init/formats_source/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/out/Cargo.toml
index dcdb8da2c..dcdb8da2c 100644
--- a/src/tools/cargo/tests/testsuite/init/formats_source/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/out/rustfmt.toml b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/out/rustfmt.toml
index b196eaa2d..b196eaa2d 100644
--- a/src/tools/cargo/tests/testsuite/init/formats_source/out/rustfmt.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/out/rustfmt.toml
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/out/src/lib.rs
index 3b9acffd5..3b9acffd5 100644
--- a/src/tools/cargo/tests/testsuite/init/formats_source/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/formats_source/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/formats_source/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/no_filename/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/no_filename/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/formats_source/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/in/.fossil/.keep b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/in/.fossil/.keep
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/in/.fossil/.keep
+++ b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/in/.fossil/.keep
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/mod.rs
index d45ba868a..d45ba868a 100644
--- a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/clean-glob b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/out/.fossil-settings/clean-glob
index a9d37c560..a9d37c560 100644
--- a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/clean-glob
+++ b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/out/.fossil-settings/clean-glob
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/ignore-glob b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/out/.fossil-settings/ignore-glob
index a9d37c560..a9d37c560 100644
--- a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/.fossil-settings/ignore-glob
+++ b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/out/.fossil-settings/ignore-glob
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/out/Cargo.toml
index dcdb8da2c..dcdb8da2c 100644
--- a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/out/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/fossil_autodetect/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/path_contains_separator/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/path_contains_separator/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/fossil_autodetect/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/git_autodetect/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/mod.rs
index aef47bc7d..aef47bc7d 100644
--- a/src/tools/cargo/tests/testsuite/init/git_autodetect/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/git_autodetect/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/out/Cargo.toml
index 1d9cfe317..1d9cfe317 100644
--- a/src/tools/cargo/tests/testsuite/init/git_autodetect/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/git_autodetect/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/out/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/git_autodetect/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/git_autodetect/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/git_autodetect/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/git_autodetect/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/mod.rs
index cd4437c65..cd4437c65 100644
--- a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/out/Cargo.toml
index a6269fdcd..a6269fdcd 100644
--- a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/out/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/git_ignore_exists_no_conflicting_entries/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/reserved_name/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/reserved_name/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/git_ignore_exists_no_conflicting_entries/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/help/mod.rs
new file mode 100644
index 000000000..7f00d347a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("init")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/in/README.md b/src/tools/cargo/tests/testsuite/cargo_init/help/stderr.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/in/README.md
+++ b/src/tools/cargo/tests/testsuite/cargo_init/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/help/stdout.log
new file mode 100644
index 000000000..5dfb02498
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/help/stdout.log
@@ -0,0 +1,31 @@
+Create a new cargo package in an existing directory
+
+Usage: cargo[EXE] init [OPTIONS] [path]
+
+Arguments:
+ [path] [default: .]
+
+Options:
+ --vcs <VCS> Initialize a new repository for the given version control system (git,
+ hg, pijul, or fossil) or do not initialize any version control at all
+ (none), overriding a global configuration. [possible values: git, hg,
+ pijul, fossil, none]
+ --bin Use a binary (application) template [default]
+ --lib Use a library template
+ --edition <YEAR> Edition to set for the crate generated [possible values: 2015, 2018,
+ 2021]
+ --name <NAME> Set the resulting package name, defaults to the directory name
+ --registry <REGISTRY> Registry to use
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help init` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/in/rustfmt.toml b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/in/rustfmt.toml
index b196eaa2d..b196eaa2d 100644
--- a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/in/rustfmt.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/in/rustfmt.toml
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/mod.rs
index fd9394049..fd9394049 100644
--- a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/out/Cargo.toml
index dcdb8da2c..dcdb8da2c 100644
--- a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/rustfmt.toml b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/out/rustfmt.toml
index b196eaa2d..b196eaa2d 100644
--- a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/rustfmt.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/out/rustfmt.toml
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/out/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/ignores_failure_to_format_source/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_bin/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_bin/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/ignores_failure_to_format_source/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/in/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/in/main.rs
index f328e4d9d..f328e4d9d 100644
--- a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/in/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/in/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/mod.rs
index 80bec8893..80bec8893 100644
--- a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/out/Cargo.toml
index 5c6c9158c..5c6c9158c 100644
--- a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/out/main.rs
index f328e4d9d..f328e4d9d 100644
--- a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/out/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/out/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/stderr.log
index 3847e4e4a..3847e4e4a 100644
--- a/src/tools/cargo/tests/testsuite/init/inferred_bin_with_git/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_git/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inferred_bin_with_git/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/in/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/in/lib.rs
index 59760b549..59760b549 100644
--- a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/in/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/in/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/mod.rs
index 80bec8893..80bec8893 100644
--- a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/out/Cargo.toml
index 39e95fe94..39e95fe94 100644
--- a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/out/lib.rs
index 59760b549..59760b549 100644
--- a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/out/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/out/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/inferred_lib_with_git/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inferred_lib_with_git/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/in/Cargo.toml
index b7a2e9036..b7a2e9036 100644
--- a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/in/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/in/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/in/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/in/README.md
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/in/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/in/README.md
diff --git a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/in/crates/foo/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/in/crates/foo/src/main.rs
index 43f0dac5e..43f0dac5e 100644
--- a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/in/crates/foo/src/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/in/crates/foo/src/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/in/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/in/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/mod.rs
index 4e1dda845..4e1dda845 100644
--- a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/out/Cargo.toml
index b7a2e9036..b7a2e9036 100644
--- a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/out/crates/foo/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/out/crates/foo/Cargo.toml
index 137ed1c87..137ed1c87 100644
--- a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/out/crates/foo/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/out/crates/foo/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/out/crates/foo/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/out/crates/foo/src/main.rs
index 43f0dac5e..43f0dac5e 100644
--- a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/out/crates/foo/src/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/out/crates/foo/src/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/out/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/stderr.log
index 3847e4e4a..3847e4e4a 100644
--- a/src/tools/cargo/tests/testsuite/init/inherit_workspace_package_table/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_hg/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/inherit_workspace_package_table/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/invalid_dir_name/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/invalid_dir_name/mod.rs
index 2b1be9022..2b1be9022 100644
--- a/src/tools/cargo/tests/testsuite/init/invalid_dir_name/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/invalid_dir_name/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/invalid_dir_name/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/invalid_dir_name/stderr.log
index 86d2c665f..86d2c665f 100644
--- a/src/tools/cargo/tests/testsuite/init/invalid_dir_name/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/invalid_dir_name/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/invalid_dir_name/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/invalid_dir_name/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/in/lib.rs
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/in/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/mod.rs
index d3e8e66df..d3e8e66df 100644
--- a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/out/Cargo.toml
index 39e95fe94..39e95fe94 100644
--- a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/path_contains_separator/in/.keep b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/out/lib.rs
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/path_contains_separator/in/.keep
+++ b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/out/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/lib_already_exists_nosrc/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_lib/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_lib/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_nosrc/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/in/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/in/src/lib.rs
index 59760b549..59760b549 100644
--- a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/in/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/in/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/mod.rs
index d3e8e66df..d3e8e66df 100644
--- a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/out/Cargo.toml
index dcdb8da2c..dcdb8da2c 100644
--- a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/out/src/lib.rs
index 59760b549..59760b549 100644
--- a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/lib_already_exists_src/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/unknown_flags/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/unknown_flags/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/lib_already_exists_src/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/mod.rs
index d45ba868a..d45ba868a 100644
--- a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/out/Cargo.toml
index dcdb8da2c..dcdb8da2c 100644
--- a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/out/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/mercurial_autodetect/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/with_argument/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/with_argument/stdout.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/mercurial_autodetect/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/mod.rs
index 7853a1a0b..a1988a06a 100644
--- a/src/tools/cargo/tests/testsuite/init/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/mod.rs
@@ -18,6 +18,7 @@ mod formats_source;
mod fossil_autodetect;
mod git_autodetect;
mod git_ignore_exists_no_conflicting_entries;
+mod help;
mod ignores_failure_to_format_source;
mod inferred_bin_with_git;
mod inferred_lib_with_git;
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/case.rs b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/in/case.rs
index b31221118..b31221118 100644
--- a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/case.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/in/case.rs
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/in/main.rs
index 7937627b9..7937627b9 100644
--- a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/in/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/in/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/mod.rs
index fdd4476d9..fdd4476d9 100644
--- a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/case.rs b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/out/case.rs
index b31221118..b31221118 100644
--- a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/case.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/out/case.rs
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/out/main.rs
index 7937627b9..7937627b9 100644
--- a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/out/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/out/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/stderr.log
index 21a1dabee..21a1dabee 100644
--- a/src/tools/cargo/tests/testsuite/init/multibin_project_name_clash/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/in/.pijul/.keep b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/in/.pijul/.keep
+++ b/src/tools/cargo/tests/testsuite/cargo_init/multibin_project_name_clash/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/no_filename/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/no_filename/mod.rs
index 8edfd2823..8edfd2823 100644
--- a/src/tools/cargo/tests/testsuite/init/no_filename/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/no_filename/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/no_filename/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/no_filename/stderr.log
index bd087ec90..bd087ec90 100644
--- a/src/tools/cargo/tests/testsuite/init/no_filename/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/no_filename/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/init/with_argument/in/foo/.keep b/src/tools/cargo/tests/testsuite/cargo_init/no_filename/stdout.log
index e69de29bb..e69de29bb 100644
--- a/src/tools/cargo/tests/testsuite/init/with_argument/in/foo/.keep
+++ b/src/tools/cargo/tests/testsuite/cargo_init/no_filename/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/in/.keep b/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/in/.keep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/in/.keep
diff --git a/src/tools/cargo/tests/testsuite/init/path_contains_separator/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/mod.rs
index 0a12f4269..0a12f4269 100644
--- a/src/tools/cargo/tests/testsuite/init/path_contains_separator/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/path_contains_separator/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/out/Cargo.toml
index 11465f1fc..11465f1fc 100644
--- a/src/tools/cargo/tests/testsuite/init/path_contains_separator/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/path_contains_separator/out/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/out/src/main.rs
index e7a11a969..e7a11a969 100644
--- a/src/tools/cargo/tests/testsuite/init/path_contains_separator/out/src/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/out/src/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/path_contains_separator/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/stderr.log
index d7947aea2..d7947aea2 100644
--- a/src/tools/cargo/tests/testsuite/init/path_contains_separator/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/path_contains_separator/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/in/.pijul/.keep b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/in/.pijul/.keep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/in/.pijul/.keep
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/mod.rs
index d45ba868a..d45ba868a 100644
--- a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/.ignore b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/out/.ignore
index 4fffb2f89..4fffb2f89 100644
--- a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/.ignore
+++ b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/out/.ignore
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/out/Cargo.toml
index dcdb8da2c..dcdb8da2c 100644
--- a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/out/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/pijul_autodetect/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/pijul_autodetect/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/reserved_name/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/reserved_name/mod.rs
index cc65fd0a1..cc65fd0a1 100644
--- a/src/tools/cargo/tests/testsuite/init/reserved_name/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/reserved_name/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/reserved_name/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/reserved_name/stderr.log
index 748971bdf..748971bdf 100644
--- a/src/tools/cargo/tests/testsuite/init/reserved_name/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/reserved_name/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/reserved_name/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/reserved_name/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/reserved_name/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_bin/in b/src/tools/cargo/tests/testsuite/cargo_init/simple_bin/in
index 1202506b6..1202506b6 120000
--- a/src/tools/cargo/tests/testsuite/init/simple_bin/in
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_bin/in
diff --git a/src/tools/cargo/tests/testsuite/init/simple_bin/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/simple_bin/mod.rs
index eaf0955f9..eaf0955f9 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_bin/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_bin/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/simple_bin/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/simple_bin/out/Cargo.toml
index a6269fdcd..a6269fdcd 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_bin/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_bin/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/simple_bin/out/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/simple_bin/out/src/main.rs
index e7a11a969..e7a11a969 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_bin/out/src/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_bin/out/src/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/simple_bin/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/simple_bin/stderr.log
index 3847e4e4a..3847e4e4a 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_bin/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_bin/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/simple_bin/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/simple_bin/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_bin/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git/in b/src/tools/cargo/tests/testsuite/cargo_init/simple_git/in
index 1202506b6..1202506b6 120000
--- a/src/tools/cargo/tests/testsuite/init/simple_git/in
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_git/in
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/simple_git/mod.rs
index c373fe2a2..c373fe2a2 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_git/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_git/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/simple_git/out/Cargo.toml
index dcdb8da2c..dcdb8da2c 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_git/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_git/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/simple_git/out/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_git/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_git/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/simple_git/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_git/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_git/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/simple_git/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/simple_git/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_git/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/mod.rs
index 142e86efd..142e86efd 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/out/Cargo.toml
index a6269fdcd..a6269fdcd 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/out/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_git_ignore_exists/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_git_ignore_exists/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg/in b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg/in
index 1202506b6..1202506b6 120000
--- a/src/tools/cargo/tests/testsuite/init/simple_hg/in
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg/in
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg/mod.rs
index 1d6765453..1d6765453 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_hg/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg/out/Cargo.toml
index dcdb8da2c..dcdb8da2c 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_hg/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg/out/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_hg/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_hg/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/simple_hg/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/mod.rs
index d45ba868a..d45ba868a 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/out/Cargo.toml
index dcdb8da2c..dcdb8da2c 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/out/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_hg_ignore_exists/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_hg_ignore_exists/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/simple_lib/in b/src/tools/cargo/tests/testsuite/cargo_init/simple_lib/in
index 1202506b6..1202506b6 120000
--- a/src/tools/cargo/tests/testsuite/init/simple_lib/in
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_lib/in
diff --git a/src/tools/cargo/tests/testsuite/init/simple_lib/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/simple_lib/mod.rs
index d6bae5167..d6bae5167 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_lib/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_lib/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/simple_lib/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/simple_lib/out/Cargo.toml
index a6269fdcd..a6269fdcd 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_lib/out/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_lib/out/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/simple_lib/out/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_init/simple_lib/out/src/lib.rs
index 7d12d9af8..7d12d9af8 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_lib/out/src/lib.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_lib/out/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/init/simple_lib/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/simple_lib/stderr.log
index f459bf226..f459bf226 100644
--- a/src/tools/cargo/tests/testsuite/init/simple_lib/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_lib/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/simple_lib/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/simple_lib/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/simple_lib/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/init/unknown_flags/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/unknown_flags/mod.rs
index 4289b4b9e..4289b4b9e 100644
--- a/src/tools/cargo/tests/testsuite/init/unknown_flags/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/unknown_flags/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/unknown_flags/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/unknown_flags/stderr.log
index 980e8acd8..980e8acd8 100644
--- a/src/tools/cargo/tests/testsuite/init/unknown_flags/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/unknown_flags/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/unknown_flags/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/unknown_flags/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/unknown_flags/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/with_argument/in/foo/.keep b/src/tools/cargo/tests/testsuite/cargo_init/with_argument/in/foo/.keep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/with_argument/in/foo/.keep
diff --git a/src/tools/cargo/tests/testsuite/init/with_argument/mod.rs b/src/tools/cargo/tests/testsuite/cargo_init/with_argument/mod.rs
index 0b5e342a1..0b5e342a1 100644
--- a/src/tools/cargo/tests/testsuite/init/with_argument/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/with_argument/mod.rs
diff --git a/src/tools/cargo/tests/testsuite/init/with_argument/out/foo/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_init/with_argument/out/foo/Cargo.toml
index 1d9cfe317..1d9cfe317 100644
--- a/src/tools/cargo/tests/testsuite/init/with_argument/out/foo/Cargo.toml
+++ b/src/tools/cargo/tests/testsuite/cargo_init/with_argument/out/foo/Cargo.toml
diff --git a/src/tools/cargo/tests/testsuite/init/with_argument/out/foo/src/main.rs b/src/tools/cargo/tests/testsuite/cargo_init/with_argument/out/foo/src/main.rs
index e7a11a969..e7a11a969 100644
--- a/src/tools/cargo/tests/testsuite/init/with_argument/out/foo/src/main.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_init/with_argument/out/foo/src/main.rs
diff --git a/src/tools/cargo/tests/testsuite/init/with_argument/stderr.log b/src/tools/cargo/tests/testsuite/cargo_init/with_argument/stderr.log
index 3847e4e4a..3847e4e4a 100644
--- a/src/tools/cargo/tests/testsuite/init/with_argument/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_init/with_argument/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_init/with_argument/stdout.log b/src/tools/cargo/tests/testsuite/cargo_init/with_argument/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_init/with_argument/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_install/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_install/help/mod.rs
new file mode 100644
index 000000000..a2c1c724b
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_install/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("install")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_install/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_install/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_install/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_install/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_install/help/stdout.log
new file mode 100644
index 000000000..a07fa47f6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_install/help/stdout.log
@@ -0,0 +1,56 @@
+Install a Rust binary. Default location is $HOME/.cargo/bin
+
+Usage: cargo[EXE] install [OPTIONS] [crate]...
+
+Arguments:
+ [crate]...
+
+Options:
+ --version <VERSION> Specify a version to install
+ --index <INDEX> Registry index to install from
+ --registry <REGISTRY> Registry to use
+ --git <URL> Git URL to install the specified crate from
+ --branch <BRANCH> Branch to use when installing from git
+ --tag <TAG> Tag to use when installing from git
+ --rev <SHA> Specific commit to use when installing from git
+ --path <PATH> Filesystem path to local crate to install
+ --root <DIR> Directory to install packages into
+ -f, --force Force overwriting existing crates or binaries
+ --no-track Do not save tracking information
+ --list list all installed packages and their versions
+ --ignore-rust-version Ignore `rust-version` specification in packages
+ --message-format <FMT> Error format
+ -q, --quiet Do not print cargo log messages
+ --debug Build in debug mode (with the 'dev' profile) instead of release mode
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
+ details
+ -h, --help Print help
+
+Target Selection:
+ --bin [<NAME>] Install only the specified binary
+ --bins Install all binaries
+ --example [<NAME>] Install only the specified example
+ --examples Install all examples
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Compilation Options:
+ -j, --jobs <N> Number of parallel jobs, defaults to # of CPUs.
+ --keep-going Do not abort the build as soon as there is an error (unstable)
+ --profile <PROFILE-NAME> Install artifacts with the specified profile
+ --target <TRIPLE> Build for the target triple
+ --target-dir <DIRECTORY> Directory for all generated artifacts
+ --timings[=<FMTS>] Timing output formats (unstable) (comma separated): html, json
+
+Manifest Options:
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help install` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_install/mod.rs b/src/tools/cargo/tests/testsuite/cargo_install/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_install/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_locate_project/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_locate_project/help/mod.rs
new file mode 100644
index 000000000..f6b7e8eaf
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_locate_project/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("locate-project")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_locate_project/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_locate_project/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_locate_project/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_locate_project/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_locate_project/help/stdout.log
new file mode 100644
index 000000000..1c6ea7b25
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_locate_project/help/stdout.log
@@ -0,0 +1,22 @@
+Print a JSON representation of a Cargo.toml file's location
+
+Usage: cargo[EXE] locate-project [OPTIONS]
+
+Options:
+ --workspace Locate Cargo.toml of the workspace root
+ --message-format <FMT> Output representation [possible values: json, plain]
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
+ details
+ -h, --help Print help
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help locate-project` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_locate_project/mod.rs b/src/tools/cargo/tests/testsuite/cargo_locate_project/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_locate_project/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_login/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_login/help/mod.rs
new file mode 100644
index 000000000..86b95da15
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_login/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("login")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_login/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_login/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_login/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_login/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_login/help/stdout.log
new file mode 100644
index 000000000..faec55c18
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_login/help/stdout.log
@@ -0,0 +1,23 @@
+Log in to a registry.
+
+Usage: cargo[EXE] login [OPTIONS] [token] [-- [args]...]
+
+Arguments:
+ [token]
+ [args]... Arguments for the credential provider (unstable)
+
+Options:
+ --registry <REGISTRY> Registry to use
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help login` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_login/mod.rs b/src/tools/cargo/tests/testsuite/cargo_login/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_login/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_logout/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_logout/help/mod.rs
new file mode 100644
index 000000000..f895b60dd
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_logout/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("logout")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_logout/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_logout/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_logout/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_logout/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_logout/help/stdout.log
new file mode 100644
index 000000000..fe328d765
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_logout/help/stdout.log
@@ -0,0 +1,19 @@
+Remove an API token from the registry locally
+
+Usage: cargo[EXE] logout [OPTIONS]
+
+Options:
+ --registry <REGISTRY> Registry to use
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help logout` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_logout/mod.rs b/src/tools/cargo/tests/testsuite/cargo_logout/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_logout/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_metadata/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_metadata/help/mod.rs
new file mode 100644
index 000000000..a88c374fe
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_metadata/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("metadata")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_metadata/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_metadata/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_metadata/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_metadata/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_metadata/help/stdout.log
new file mode 100644
index 000000000..939fc40c9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_metadata/help/stdout.log
@@ -0,0 +1,30 @@
+Output the resolved dependencies of a package, the concrete used versions including overrides, in
+machine-readable format
+
+Usage: cargo[EXE] metadata [OPTIONS]
+
+Options:
+ --filter-platform <TRIPLE> Only include resolve dependencies matching the given target-triple
+ --no-deps Output information only about the workspace members and don't
+ fetch dependencies
+ --format-version <VERSION> Format version [possible values: 1]
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
+ details
+ -h, --help Print help
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help metadata` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_metadata/mod.rs b/src/tools/cargo/tests/testsuite/cargo_metadata/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_metadata/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_new/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_new/help/mod.rs
new file mode 100644
index 000000000..6a1721deb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_new/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("new")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_new/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_new/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_new/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_new/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_new/help/stdout.log
new file mode 100644
index 000000000..7252e0da1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_new/help/stdout.log
@@ -0,0 +1,31 @@
+Create a new cargo package at <path>
+
+Usage: cargo[EXE] new [OPTIONS] <path>
+
+Arguments:
+ <path>
+
+Options:
+ --vcs <VCS> Initialize a new repository for the given version control system (git,
+ hg, pijul, or fossil) or do not initialize any version control at all
+ (none), overriding a global configuration. [possible values: git, hg,
+ pijul, fossil, none]
+ --bin Use a binary (application) template [default]
+ --lib Use a library template
+ --edition <YEAR> Edition to set for the crate generated [possible values: 2015, 2018,
+ 2021]
+ --name <NAME> Set the resulting package name, defaults to the directory name
+ --registry <REGISTRY> Registry to use
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help new` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_new/mod.rs b/src/tools/cargo/tests/testsuite/cargo_new/mod.rs
index e895cf883..887316395 100644
--- a/src/tools/cargo/tests/testsuite/cargo_new/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_new/mod.rs
@@ -1,3 +1,4 @@
+mod help;
mod inherit_workspace_lints;
mod inherit_workspace_package_table;
mod inherit_workspace_package_table_with_edition;
diff --git a/src/tools/cargo/tests/testsuite/cargo_owner/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_owner/help/mod.rs
new file mode 100644
index 000000000..20583e5b1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_owner/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("owner")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_owner/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_owner/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_owner/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_owner/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_owner/help/stdout.log
new file mode 100644
index 000000000..3c8495ff0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_owner/help/stdout.log
@@ -0,0 +1,27 @@
+Manage the owners of a crate on the registry
+
+Usage: cargo[EXE] owner [OPTIONS] [crate]
+
+Arguments:
+ [crate]
+
+Options:
+ -a, --add <LOGIN> Name of a user or team to invite as an owner
+ -r, --remove <LOGIN> Name of a user or team to remove as an owner
+ -l, --list List owners of a crate
+ --index <INDEX> Registry index to modify owners for
+ --token <TOKEN> API token to use when authenticating
+ --registry <REGISTRY> Registry to use
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help owner` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_owner/mod.rs b/src/tools/cargo/tests/testsuite/cargo_owner/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_owner/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_package/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_package/help/mod.rs
new file mode 100644
index 000000000..4e2f28e4f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_package/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("package")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_package/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_package/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_package/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_package/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_package/help/stdout.log
new file mode 100644
index 000000000..35e32f313
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_package/help/stdout.log
@@ -0,0 +1,39 @@
+Assemble the local package into a distributable tarball
+
+Usage: cargo[EXE] package [OPTIONS]
+
+Options:
+ -l, --list Print files included in a package without making one
+ --no-verify Don't verify the contents by building them
+ --no-metadata Ignore warnings about a lack of human-usable metadata
+ --allow-dirty Allow dirty working directories to be packaged
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package(s) to assemble
+ --workspace Assemble all packages in the workspace
+ --exclude <SPEC> Don't assemble specified packages
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Compilation Options:
+ --target <TRIPLE> Build for the target triple
+ --target-dir <DIRECTORY> Directory for all generated artifacts
+ -j, --jobs <N> Number of parallel jobs, defaults to # of CPUs.
+ --keep-going Do not abort the build as soon as there is an error (unstable)
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help package` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_package/mod.rs b/src/tools/cargo/tests/testsuite/cargo_package/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_package/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_pkgid/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_pkgid/help/mod.rs
new file mode 100644
index 000000000..6d182d116
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_pkgid/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("pkgid")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_pkgid/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_pkgid/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_pkgid/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_pkgid/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_pkgid/help/stdout.log
new file mode 100644
index 000000000..ed48bb7ea
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_pkgid/help/stdout.log
@@ -0,0 +1,25 @@
+Print a fully qualified package specification
+
+Usage: cargo[EXE] pkgid [OPTIONS] [spec]
+
+Arguments:
+ [spec]
+
+Options:
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Argument to get the package ID specifier for
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help pkgid` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_pkgid/mod.rs b/src/tools/cargo/tests/testsuite/cargo_pkgid/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_pkgid/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_publish/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_publish/help/mod.rs
new file mode 100644
index 000000000..183b6aac4
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_publish/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("publish")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_publish/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_publish/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_publish/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_publish/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_publish/help/stdout.log
new file mode 100644
index 000000000..c02522887
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_publish/help/stdout.log
@@ -0,0 +1,39 @@
+Upload a package to the registry
+
+Usage: cargo[EXE] publish [OPTIONS]
+
+Options:
+ --dry-run Perform all checks without uploading
+ --index <INDEX> Registry index URL to upload the package to
+ --registry <REGISTRY> Registry to publish to
+ --token <TOKEN> Token to use when uploading
+ --no-verify Don't verify the contents by building them
+ --allow-dirty Allow dirty working directories to be packaged
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package to publish
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Compilation Options:
+ -j, --jobs <N> Number of parallel jobs, defaults to # of CPUs.
+ --keep-going Do not abort the build as soon as there is an error (unstable)
+ --target <TRIPLE> Build for the target triple
+ --target-dir <DIRECTORY> Directory for all generated artifacts
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help publish` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_publish/mod.rs b/src/tools/cargo/tests/testsuite/cargo_publish/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_publish/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_read_manifest/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_read_manifest/help/mod.rs
new file mode 100644
index 000000000..d0055f6d8
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_read_manifest/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("read-manifest")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_read_manifest/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_read_manifest/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_read_manifest/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_read_manifest/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_read_manifest/help/stdout.log
new file mode 100644
index 000000000..83db5413d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_read_manifest/help/stdout.log
@@ -0,0 +1,19 @@
+Print a JSON representation of a Cargo.toml manifest.
+
+Deprecated, use `cargo metadata --no-deps` instead.
+
+Usage: cargo[EXE] read-manifest [OPTIONS]
+
+Options:
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
diff --git a/src/tools/cargo/tests/testsuite/cargo_read_manifest/mod.rs b/src/tools/cargo/tests/testsuite/cargo_read_manifest/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_read_manifest/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/stderr.log
index dd71023a8..486ef359f 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/avoid_empty_tables/stderr.log
@@ -1,2 +1 @@
Removing clippy from dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/build/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/build/stderr.log
index f037ebe28..a51bea48c 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/build/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/build/stderr.log
@@ -1,2 +1 @@
Removing semver from build-dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/dev/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/dev/stderr.log
index c629b26b1..ccabdb193 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/dev/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/dev/stderr.log
@@ -1,2 +1 @@
Removing regex from dev-dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/Cargo.toml
new file mode 100644
index 000000000..28b028417
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/Cargo.toml
@@ -0,0 +1,8 @@
+# Cargo.toml
+
+[workspace]
+members = ["serde", "serde_derive"]
+
+[patch.crates-io]
+serde = { path = "serde" }
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde/Cargo.toml
new file mode 100644
index 000000000..c05589aba
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde/Cargo.toml
@@ -0,0 +1,9 @@
+# serde/Cargo.toml
+
+[package]
+name = "serde"
+version = "1.0.0"
+
+[dependencies]
+serde_derive = { path = "../serde_derive" }
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde_derive/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde_derive/Cargo.toml
new file mode 100644
index 000000000..2b9b48b50
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde_derive/Cargo.toml
@@ -0,0 +1,8 @@
+# serde_derive/Cargo.toml
+
+[package]
+name = "serde_derive"
+version = "1.0.0"
+
+[dev-dependencies]
+serde_json = "1.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde_derive/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde_derive/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/in/serde_derive/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/mod.rs
new file mode 100644
index 000000000..f66478c5d
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/mod.rs
@@ -0,0 +1,27 @@
+use cargo_test_support::compare::assert_ui;
+use cargo_test_support::curr_dir;
+use cargo_test_support::CargoCommand;
+use cargo_test_support::Project;
+
+#[cargo_test]
+fn case() {
+ cargo_test_support::registry::init();
+ cargo_test_support::registry::Package::new("serde", "1.0.0").publish();
+ cargo_test_support::registry::Package::new("serde_json", "1.0.0")
+ .dep("serde", "1.0.0")
+ .publish();
+
+ let project = Project::from_template(curr_dir!().join("in"));
+ let project_root = project.root();
+
+ snapbox::cmd::Command::cargo_ui()
+ .current_dir(&project_root)
+ .arg("remove")
+ .args(["--package", "serde", "serde_derive"])
+ .assert()
+ .code(0)
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+
+ assert_ui().subset_matches(curr_dir!().join("out"), &project_root);
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/Cargo.toml
new file mode 100644
index 000000000..28b028417
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/Cargo.toml
@@ -0,0 +1,8 @@
+# Cargo.toml
+
+[workspace]
+members = ["serde", "serde_derive"]
+
+[patch.crates-io]
+serde = { path = "serde" }
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde/Cargo.toml
new file mode 100644
index 000000000..a91d8ebd5
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde/Cargo.toml
@@ -0,0 +1,6 @@
+# serde/Cargo.toml
+
+[package]
+name = "serde"
+version = "1.0.0"
+
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde_derive/Cargo.toml b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde_derive/Cargo.toml
new file mode 100644
index 000000000..2b9b48b50
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde_derive/Cargo.toml
@@ -0,0 +1,8 @@
+# serde_derive/Cargo.toml
+
+[package]
+name = "serde_derive"
+version = "1.0.0"
+
+[dev-dependencies]
+serde_json = "1.0.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde_derive/src/lib.rs b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde_derive/src/lib.rs
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/out/serde_derive/src/lib.rs
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/stderr.log
new file mode 100644
index 000000000..b4377b3a4
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/stderr.log
@@ -0,0 +1 @@
+ Removing serde_derive from dependencies
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/stdout.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_keep_used_patch/stdout.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/mod.rs
index ec521a5bb..d4d305323 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/mod.rs
@@ -23,6 +23,13 @@ fn case() {
})
.url();
+ let git_project3 = git::new("bar3", |project| {
+ project
+ .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+ .file("src/lib.rs", "")
+ })
+ .url();
+
let in_project = project()
.file(
"Cargo.toml",
@@ -38,7 +45,7 @@ fn case() {
bar = {{ git = \"{git_project1}\" }}\n\
\n\
[patch.\"{git_project1}\"]\n\
- bar = {{ git = \"{git_project2}\" }}\n\
+ bar = {{ git = \"{git_project3}\" }}\n\
\n\
[patch.crates-io]\n\
bar = {{ git = \"{git_project2}\" }}\n",
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/out/Cargo.lock b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/out/Cargo.lock
new file mode 100644
index 000000000..4a1467ba1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/out/Cargo.lock
@@ -0,0 +1,19 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "git+[..]"
+
+[[package]]
+name = "my-member"
+version = "0.1.0"
+dependencies = [
+ "bar",
+]
+
+[[package]]
+name = "my-project"
+version = "0.1.0"
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/stderr.log
index 1dd2e7757..ba519ba1b 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_patch/stderr.log
@@ -1,3 +1 @@
Removing bar from dependencies
- Updating git repository `[ROOTURL]/bar2`
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/stderr.log
index 0e2e38f26..9dee9e2b7 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_profile/stderr.log
@@ -1,2 +1 @@
Removing toml from dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/stderr.log
index 0e2e38f26..9dee9e2b7 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/gc_replace/stderr.log
@@ -1,2 +1 @@
Removing toml from dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/help/mod.rs
new file mode 100644
index 000000000..69fb60f03
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("remove")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_remove/help/stdout.log
new file mode 100644
index 000000000..81a2d78b6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/help/stdout.log
@@ -0,0 +1,29 @@
+Remove dependencies from a Cargo.toml manifest file
+
+Usage: cargo[EXE] remove [OPTIONS] <DEP_ID>...
+
+Arguments:
+ <DEP_ID>... Dependencies to be removed
+
+Options:
+ --dry-run Don't actually write the manifest
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Section:
+ --dev Remove as development dependency
+ --build Remove as build dependency
+ --target <TARGET> Remove as dependency from the given target platform
+
+Package Selection:
+ -p, --package [<SPEC>] Package to remove from
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/mod.rs b/src/tools/cargo/tests/testsuite/cargo_remove/mod.rs
index feb08cea4..ea7902bd8 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/mod.rs
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/mod.rs
@@ -2,9 +2,11 @@ mod avoid_empty_tables;
mod build;
mod dev;
mod dry_run;
+mod gc_keep_used_patch;
mod gc_patch;
mod gc_profile;
mod gc_replace;
+mod help;
mod invalid_arg;
mod invalid_dep;
mod invalid_package;
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/stderr.log
index 1eb59aca1..111b1e94a 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_deps/stderr.log
@@ -1,3 +1,2 @@
Removing docopt from dependencies
Removing semver from dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/stderr.log
index a3042dcc3..8a69c94f5 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/multiple_dev/stderr.log
@@ -1,3 +1,2 @@
Removing regex from dev-dependencies
Removing serde from dev-dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/stderr.log
index 72c9f9217..d3656ec54 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/optional_dep_feature/stderr.log
@@ -1,2 +1 @@
Removing serde from dev-dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/stderr.log
index 2dc546fa7..ef7354ef1 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/optional_feature/stderr.log
@@ -1,2 +1 @@
Removing semver from dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/package/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/package/stderr.log
index 231026f2b..7083976b1 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/package/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/package/stderr.log
@@ -1,2 +1 @@
Removing docopt from dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/stderr.log
index 231026f2b..7083976b1 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/remove_basic/stderr.log
@@ -1,2 +1 @@
Removing docopt from dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/target/stderr.log
index 810abd994..8fb1b5000 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/target/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target/stderr.log
@@ -1,2 +1 @@
Removing dbus from dependencies for target `x86_64-unknown-linux-gnu`
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target_build/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/target_build/stderr.log
index b06f8f319..673a47ceb 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/target_build/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target_build/stderr.log
@@ -1,2 +1 @@
Removing semver from build-dependencies for target `x86_64-unknown-linux-gnu`
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/stderr.log
index 68553a3bd..854aff44a 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/target_dev/stderr.log
@@ -1,2 +1 @@
Removing ncurses from dev-dependencies for target `x86_64-unknown-linux-gnu`
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/stderr.log
index 164f8f4b9..1494b0fc5 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/update_lock_file/stderr.log
@@ -1,2 +1 @@
Removing rustc-serialize from dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/stderr.log
index f037ebe28..a51bea48c 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/workspace/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace/stderr.log
@@ -1,2 +1 @@
Removing semver from build-dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/stderr.log
index f037ebe28..a51bea48c 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_non_virtual/stderr.log
@@ -1,2 +1 @@
Removing semver from build-dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/stderr.log b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/stderr.log
index f037ebe28..a51bea48c 100644
--- a/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/stderr.log
+++ b/src/tools/cargo/tests/testsuite/cargo_remove/workspace_preserved/stderr.log
@@ -1,2 +1 @@
Removing semver from build-dependencies
- Updating `dummy-registry` index
diff --git a/src/tools/cargo/tests/testsuite/cargo_report/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_report/help/mod.rs
new file mode 100644
index 000000000..3d2975769
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_report/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("report")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_report/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_report/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_report/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_report/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_report/help/stdout.log
new file mode 100644
index 000000000..67819de55
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_report/help/stdout.log
@@ -0,0 +1,20 @@
+Generate and display various kinds of reports
+
+Usage: cargo[EXE] report [OPTIONS] <COMMAND>
+
+Commands:
+ future-incompatibilities Reports any crates which will eventually stop compiling
+
+Options:
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help report` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_report/mod.rs b/src/tools/cargo/tests/testsuite/cargo_report/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_report/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_run/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_run/help/mod.rs
new file mode 100644
index 000000000..0a8a6bde0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_run/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("run")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_run/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_run/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_run/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_run/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_run/help/stdout.log
new file mode 100644
index 000000000..6ab0e76b1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_run/help/stdout.log
@@ -0,0 +1,47 @@
+Run a binary or example of the local package
+
+Usage: cargo[EXE] run [OPTIONS] [args]...
+
+Arguments:
+ [args]... Arguments for the binary or example to run
+
+Options:
+ --ignore-rust-version Ignore `rust-version` specification in packages
+ --message-format <FMT> Error format
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
+ details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package with the target to run
+
+Target Selection:
+ --bin [<NAME>] Name of the bin target to run
+ --example [<NAME>] Name of the example target to run
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Compilation Options:
+ -j, --jobs <N> Number of parallel jobs, defaults to # of CPUs.
+ --keep-going Do not abort the build as soon as there is an error (unstable)
+ -r, --release Build artifacts in release mode, with optimizations
+ --profile <PROFILE-NAME> Build artifacts with the specified profile
+ --target <TRIPLE> Build for the target triple
+ --target-dir <DIRECTORY> Directory for all generated artifacts
+ --unit-graph Output build graph in JSON (unstable)
+ --timings[=<FMTS>] Timing output formats (unstable) (comma separated): html, json
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help run` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_run/mod.rs b/src/tools/cargo/tests/testsuite/cargo_run/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_run/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_rustc/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_rustc/help/mod.rs
new file mode 100644
index 000000000..0a3b31686
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_rustc/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("rustc")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_rustc/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_rustc/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_rustc/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_rustc/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_rustc/help/stdout.log
new file mode 100644
index 000000000..f587c3276
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_rustc/help/stdout.log
@@ -0,0 +1,58 @@
+Compile a package, and pass extra options to the compiler
+
+Usage: cargo[EXE] rustc [OPTIONS] [args]...
+
+Arguments:
+ [args]... Extra rustc flags
+
+Options:
+ --print <INFO> Output compiler information without compiling
+ --crate-type <CRATE-TYPE> Comma separated list of types of crates for the compiler to emit
+ --future-incompat-report Outputs a future incompatibility report at the end of the build
+ --ignore-rust-version Ignore `rust-version` specification in packages
+ --message-format <FMT> Error format
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
+ details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package to build
+
+Target Selection:
+ --lib Build only this package's library
+ --bins Build all binaries
+ --bin [<NAME>] Build only the specified binary
+ --examples Build all examples
+ --example [<NAME>] Build only the specified example
+ --tests Build all tests
+ --test [<NAME>] Build only the specified test target
+ --benches Build all benches
+ --bench [<NAME>] Build only the specified bench target
+ --all-targets Build all targets
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Compilation Options:
+ -j, --jobs <N> Number of parallel jobs, defaults to # of CPUs.
+ --keep-going Do not abort the build as soon as there is an error (unstable)
+ -r, --release Build artifacts in release mode, with optimizations
+ --profile <PROFILE-NAME> Build artifacts with the specified profile
+ --target <TRIPLE> Target triple which compiles will be for
+ --target-dir <DIRECTORY> Directory for all generated artifacts
+ --unit-graph Output build graph in JSON (unstable)
+ --timings[=<FMTS>] Timing output formats (unstable) (comma separated): html, json
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help rustc` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_rustc/mod.rs b/src/tools/cargo/tests/testsuite/cargo_rustc/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_rustc/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_rustdoc/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_rustdoc/help/mod.rs
new file mode 100644
index 000000000..88652749f
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_rustdoc/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("rustdoc")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_rustdoc/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_rustdoc/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_rustdoc/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_rustdoc/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_rustdoc/help/stdout.log
new file mode 100644
index 000000000..4cac29e0a
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_rustdoc/help/stdout.log
@@ -0,0 +1,56 @@
+Build a package's documentation, using specified custom flags.
+
+Usage: cargo[EXE] rustdoc [OPTIONS] [args]...
+
+Arguments:
+ [args]... Extra rustdoc flags
+
+Options:
+ --open Opens the docs in a browser after the operation
+ --ignore-rust-version Ignore `rust-version` specification in packages
+ --message-format <FMT> Error format
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
+ details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package to document
+
+Target Selection:
+ --lib Build only this package's library
+ --bins Build all binaries
+ --bin [<NAME>] Build only the specified binary
+ --examples Build all examples
+ --example [<NAME>] Build only the specified example
+ --tests Build all tests
+ --test [<NAME>] Build only the specified test target
+ --benches Build all benches
+ --bench [<NAME>] Build only the specified bench target
+ --all-targets Build all targets
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Compilation Options:
+ -j, --jobs <N> Number of parallel jobs, defaults to # of CPUs.
+ --keep-going Do not abort the build as soon as there is an error (unstable)
+ -r, --release Build artifacts in release mode, with optimizations
+ --profile <PROFILE-NAME> Build artifacts with the specified profile
+ --target <TRIPLE> Build for the target triple
+ --target-dir <DIRECTORY> Directory for all generated artifacts
+ --unit-graph Output build graph in JSON (unstable)
+ --timings[=<FMTS>] Timing output formats (unstable) (comma separated): html, json
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help rustdoc` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_rustdoc/mod.rs b/src/tools/cargo/tests/testsuite/cargo_rustdoc/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_rustdoc/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_search/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_search/help/mod.rs
new file mode 100644
index 000000000..b580816e7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_search/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("search")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_search/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_search/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_search/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_search/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_search/help/stdout.log
new file mode 100644
index 000000000..8572064e3
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_search/help/stdout.log
@@ -0,0 +1,24 @@
+Search packages in crates.io
+
+Usage: cargo[EXE] search [OPTIONS] [query]...
+
+Arguments:
+ [query]...
+
+Options:
+ --limit <LIMIT> Limit the number of results (default: 10, max: 100)
+ --index <INDEX> Registry index URL to upload the package to
+ --registry <REGISTRY> Registry to use
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help search` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_search/mod.rs b/src/tools/cargo/tests/testsuite/cargo_search/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_search/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_test/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_test/help/mod.rs
new file mode 100644
index 000000000..ae5b092b7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_test/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("test")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_test/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_test/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_test/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_test/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_test/help/stdout.log
new file mode 100644
index 000000000..d693dc3c9
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_test/help/stdout.log
@@ -0,0 +1,63 @@
+Execute all unit and integration tests and build examples of a local package
+
+Usage: cargo[EXE] test [OPTIONS] [TESTNAME] [-- [args]...]
+
+Arguments:
+ [TESTNAME] If specified, only run tests containing this string in their names
+ [args]... Arguments for the test binary
+
+Options:
+ --doc Test only this library's documentation
+ --no-run Compile, but don't run tests
+ --no-fail-fast Run all tests regardless of failure
+ --ignore-rust-version Ignore `rust-version` specification in packages
+ --future-incompat-report Outputs a future incompatibility report at the end of the build
+ --message-format <FMT> Error format
+ -q, --quiet Display one character per test instead of one line
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
+ details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package to run tests for
+ --workspace Test all packages in the workspace
+ --exclude <SPEC> Exclude packages from the test
+ --all Alias for --workspace (deprecated)
+
+Target Selection:
+ --lib Test only this package's library unit tests
+ --bins Test all binaries
+ --bin [<NAME>] Test only the specified binary
+ --examples Test all examples
+ --example [<NAME>] Test only the specified example
+ --tests Test all tests
+ --test [<NAME>] Test only the specified test target
+ --benches Test all benches
+ --bench [<NAME>] Test only the specified bench target
+ --all-targets Test all targets (does not include doctests)
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Compilation Options:
+ -j, --jobs <N> Number of parallel jobs, defaults to # of CPUs.
+ -r, --release Build artifacts in release mode, with optimizations
+ --profile <PROFILE-NAME> Build artifacts with the specified profile
+ --target <TRIPLE> Build for the target triple
+ --target-dir <DIRECTORY> Directory for all generated artifacts
+ --unit-graph Output build graph in JSON (unstable)
+ --timings[=<FMTS>] Timing output formats (unstable) (comma separated): html, json
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help test` for more detailed information.
+Run `cargo test -- --help` for test binary options.
diff --git a/src/tools/cargo/tests/testsuite/cargo_test/mod.rs b/src/tools/cargo/tests/testsuite/cargo_test/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_test/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_tree/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_tree/help/mod.rs
new file mode 100644
index 000000000..269ac2cdc
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_tree/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("tree")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_tree/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_tree/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_tree/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_tree/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_tree/help/stdout.log
new file mode 100644
index 000000000..268b6b2ad
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_tree/help/stdout.log
@@ -0,0 +1,45 @@
+Display a tree visualization of a dependency graph
+
+Usage: cargo[EXE] tree [OPTIONS]
+
+Options:
+ -q, --quiet Do not print cargo log messages
+ -e, --edges <KINDS> The kinds of dependencies to display (features, normal, build, dev, all,
+ no-normal, no-build, no-dev, no-proc-macro)
+ -i, --invert [<SPEC>] Invert the tree direction and focus on the given package
+ --prune <SPEC> Prune the given package from the display of the dependency tree
+ --depth <DEPTH> Maximum display depth of the dependency tree
+ --prefix <PREFIX> Change the prefix (indentation) of how each entry is displayed [default:
+ indent] [possible values: depth, indent, none]
+ --no-dedupe Do not de-duplicate (repeats all shared dependencies)
+ -d, --duplicates Show only dependencies which come in multiple versions (implies -i)
+ --charset <CHARSET> Character set to use in output [default: utf8] [possible values: utf8,
+ ascii]
+ -f, --format <FORMAT> Format string used for printing dependencies [default: {p}]
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package to be used as the root of the tree
+ --workspace Display the tree for all packages in the workspace
+ --exclude <SPEC> Exclude specific workspace members
+
+Feature Selection:
+ -F, --features <FEATURES> Space or comma separated list of features to activate
+ --all-features Activate all available features
+ --no-default-features Do not activate the `default` feature
+
+Compilation Options:
+ --target <TRIPLE> Filter dependencies matching the given target-triple (default host
+ platform). Pass `all` to include all targets.
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help tree` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_tree/mod.rs b/src/tools/cargo/tests/testsuite/cargo_tree/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_tree/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_uninstall/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_uninstall/help/mod.rs
new file mode 100644
index 000000000..60c4faed0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_uninstall/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("uninstall")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_uninstall/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_uninstall/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_uninstall/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_uninstall/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_uninstall/help/stdout.log
new file mode 100644
index 000000000..2da1a5d57
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_uninstall/help/stdout.log
@@ -0,0 +1,28 @@
+Remove a Rust binary
+
+Usage: cargo[EXE] uninstall [OPTIONS] [spec]...
+
+Arguments:
+ [spec]...
+
+Options:
+ --root <DIR> Directory to uninstall packages from
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Package Selection:
+ -p, --package [<SPEC>] Package to uninstall
+
+Target Selection:
+ --bin <NAME> Only uninstall the binary NAME
+
+Manifest Options:
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help uninstall` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_uninstall/mod.rs b/src/tools/cargo/tests/testsuite/cargo_uninstall/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_uninstall/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_update/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_update/help/mod.rs
new file mode 100644
index 000000000..ae310977c
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_update/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("update")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_update/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_update/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_update/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_update/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_update/help/stdout.log
new file mode 100644
index 000000000..6cc109151
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_update/help/stdout.log
@@ -0,0 +1,26 @@
+Update dependencies as recorded in the local lock file
+
+Usage: cargo[EXE] update [OPTIONS]
+
+Options:
+ --dry-run Don't actually write the lockfile
+ --aggressive Force updating all dependencies of SPEC as well when used with -p
+ --precise <PRECISE> Update a single dependency to exactly PRECISE when used with -p
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Package Selection:
+ -w, --workspace Only update the workspace packages
+ -p, --package [<SPEC>] Package to update
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help update` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_update/mod.rs b/src/tools/cargo/tests/testsuite/cargo_update/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_update/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_vendor/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_vendor/help/mod.rs
new file mode 100644
index 000000000..c111b99c0
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_vendor/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("vendor")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_vendor/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_vendor/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_vendor/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_vendor/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_vendor/help/stdout.log
new file mode 100644
index 000000000..7f37ab56e
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_vendor/help/stdout.log
@@ -0,0 +1,27 @@
+Vendor all dependencies for a project locally
+
+Usage: cargo[EXE] vendor [OPTIONS] [path]
+
+Arguments:
+ [path] Where to vendor crates (`vendor` by default)
+
+Options:
+ --no-delete Don't delete older crates in the vendor directory
+ -s, --sync <TOML> Additional `Cargo.toml` to sync and vendor
+ --respect-source-config Respect `[source]` config in `.cargo/config`
+ --versioned-dirs Always include version in subdir name
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for
+ details
+ -h, --help Print help
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help vendor` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_vendor/mod.rs b/src/tools/cargo/tests/testsuite/cargo_vendor/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_vendor/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_verify_project/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_verify_project/help/mod.rs
new file mode 100644
index 000000000..8f6c9bab1
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_verify_project/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("verify-project")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_verify_project/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_verify_project/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_verify_project/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_verify_project/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_verify_project/help/stdout.log
new file mode 100644
index 000000000..a61534500
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_verify_project/help/stdout.log
@@ -0,0 +1,19 @@
+Check correctness of crate manifest
+
+Usage: cargo[EXE] verify-project [OPTIONS]
+
+Options:
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --manifest-path <PATH> Path to Cargo.toml
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help verify-project` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_verify_project/mod.rs b/src/tools/cargo/tests/testsuite/cargo_verify_project/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_verify_project/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_version/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_version/help/mod.rs
new file mode 100644
index 000000000..daa8548c6
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_version/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("version")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_version/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_version/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_version/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_version/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_version/help/stdout.log
new file mode 100644
index 000000000..3f79051ad
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_version/help/stdout.log
@@ -0,0 +1,18 @@
+Show version information
+
+Usage: cargo[EXE] version [OPTIONS]
+
+Options:
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help version` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_version/mod.rs b/src/tools/cargo/tests/testsuite/cargo_version/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_version/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/cargo_yank/help/mod.rs b/src/tools/cargo/tests/testsuite/cargo_yank/help/mod.rs
new file mode 100644
index 000000000..12034f152
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_yank/help/mod.rs
@@ -0,0 +1,13 @@
+use cargo_test_support::curr_dir;
+use cargo_test_support::prelude::*;
+
+#[cargo_test]
+fn case() {
+ snapbox::cmd::Command::cargo_ui()
+ .arg("yank")
+ .arg("--help")
+ .assert()
+ .success()
+ .stdout_matches_path(curr_dir!().join("stdout.log"))
+ .stderr_matches_path(curr_dir!().join("stderr.log"));
+}
diff --git a/src/tools/cargo/tests/testsuite/cargo_yank/help/stderr.log b/src/tools/cargo/tests/testsuite/cargo_yank/help/stderr.log
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_yank/help/stderr.log
diff --git a/src/tools/cargo/tests/testsuite/cargo_yank/help/stdout.log b/src/tools/cargo/tests/testsuite/cargo_yank/help/stdout.log
new file mode 100644
index 000000000..25b04e6c7
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_yank/help/stdout.log
@@ -0,0 +1,26 @@
+Remove a pushed crate from the index
+
+Usage: cargo[EXE] yank [OPTIONS] [crate]
+
+Arguments:
+ [crate]
+
+Options:
+ --version <VERSION> The version to yank or un-yank
+ --undo Undo a yank, putting a version back into the index
+ --index <INDEX> Registry index to yank from
+ --registry <REGISTRY> Registry to use
+ --token <TOKEN> API token to use when authenticating
+ -q, --quiet Do not print cargo log messages
+ -v, --verbose... Use verbose output (-vv very verbose/build.rs output)
+ --color <WHEN> Coloring: auto, always, never
+ --config <KEY=VALUE> Override a configuration value
+ -Z <FLAG> Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details
+ -h, --help Print help
+
+Manifest Options:
+ --frozen Require Cargo.lock and cache are up to date
+ --locked Require Cargo.lock is up to date
+ --offline Run without accessing the network
+
+Run `cargo help yank` for more detailed information.
diff --git a/src/tools/cargo/tests/testsuite/cargo_yank/mod.rs b/src/tools/cargo/tests/testsuite/cargo_yank/mod.rs
new file mode 100644
index 000000000..c0ce11180
--- /dev/null
+++ b/src/tools/cargo/tests/testsuite/cargo_yank/mod.rs
@@ -0,0 +1 @@
+mod help;
diff --git a/src/tools/cargo/tests/testsuite/credential_process.rs b/src/tools/cargo/tests/testsuite/credential_process.rs
index 8c202c6a3..c010c01cd 100644
--- a/src/tools/cargo/tests/testsuite/credential_process.rs
+++ b/src/tools/cargo/tests/testsuite/credential_process.rs
@@ -1,8 +1,7 @@
//! Tests for credential-process.
-use cargo_test_support::registry::TestRegistry;
+use cargo_test_support::registry::{Package, TestRegistry};
use cargo_test_support::{basic_manifest, cargo_process, paths, project, registry, Project};
-use std::fs::{self, read_to_string};
fn toml_bin(proj: &Project, name: &str) -> String {
proj.bin(name).display().to_string().replace('\\', "\\\\")
@@ -24,7 +23,7 @@ fn gated() {
".cargo/config",
r#"
[registry]
- credential-process = "false"
+ credential-provider = ["false"]
"#,
)
.file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
@@ -65,80 +64,6 @@ or use environment variable CARGO_REGISTRIES_ALTERNATIVE_TOKEN
.run();
}
-#[cargo_test]
-fn warn_both_token_and_process() {
- // Specifying both credential-process and a token in config should issue a warning.
- let _server = registry::RegistryBuilder::new()
- .http_api()
- .http_index()
- .alternative()
- .no_configure_token()
- .build();
- let p = project()
- .file(
- ".cargo/config",
- r#"
- [registries.alternative]
- token = "alternative-sekrit"
- credential-process = "false"
- "#,
- )
- .file(
- "Cargo.toml",
- r#"
- [package]
- name = "foo"
- version = "0.1.0"
- description = "foo"
- authors = []
- license = "MIT"
- homepage = "https://example.com/"
- "#,
- )
- .file("src/lib.rs", "")
- .build();
-
- p.cargo("publish --no-verify --registry alternative -Z credential-process")
- .masquerade_as_nightly_cargo(&["credential-process"])
- .with_status(101)
- .with_stderr(
- "\
-[UPDATING] [..]
-[ERROR] both `token` and `credential-process` were specified in the config for registry `alternative`.
-Only one of these values may be set, remove one or the other to proceed.
-",
- )
- .run();
-
- // Try with global credential-process, and registry-specific `token`.
- // This should silently use the config token, and not run the "false" exe.
- p.change_file(
- ".cargo/config",
- r#"
- [registry]
- credential-process = "false"
-
- [registries.alternative]
- token = "alternative-sekrit"
- "#,
- );
- p.cargo("publish --no-verify --registry alternative -Z credential-process")
- .masquerade_as_nightly_cargo(&["credential-process"])
- .with_stderr(
- "\
-[UPDATING] [..]
-[PACKAGING] foo v0.1.0 [..]
-[PACKAGED] [..]
-[UPLOADING] foo v0.1.0 [..]
-[UPLOADED] foo v0.1.0 [..]
-note: Waiting [..]
-You may press ctrl-c [..]
-[PUBLISHED] foo v0.1.0 [..]
-",
- )
- .run();
-}
-
/// Setup for a test that will issue a command that needs to fetch a token.
///
/// This does the following:
@@ -158,29 +83,14 @@ fn get_token_test() -> (Project, TestRegistry) {
))
.alternative()
.http_api()
+ .http_index()
+ .auth_required()
.build();
- // The credential process to use.
- let cred_proj = project()
- .at("cred_proj")
- .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0"))
- .file(
- "src/main.rs",
- r#"
- use std::fs::File;
- use std::io::Write;
- fn main() {
- let mut f = File::options()
- .write(true)
- .create(true)
- .append(true)
- .open("runs.log")
- .unwrap();
- write!(f, "+");
- println!("sekrit");
- } "#,
- )
- .build();
- cred_proj.cargo("build").run();
+
+ let provider = build_provider(
+ "test-cred",
+ r#"{"Ok":{"kind":"get","token":"sekrit","cache":"session","operation_independent":false}}"#,
+ );
let p = project()
.file(
@@ -189,10 +99,9 @@ fn get_token_test() -> (Project, TestRegistry) {
r#"
[registries.alternative]
index = "{}"
- credential-process = ["{}"]
+ credential-provider = ["{provider}"]
"#,
server.index_url(),
- toml_bin(&cred_proj, "test-cred")
),
)
.file(
@@ -202,7 +111,6 @@ fn get_token_test() -> (Project, TestRegistry) {
name = "foo"
version = "0.1.0"
description = "foo"
- authors = []
license = "MIT"
homepage = "https://example.com/"
"#,
@@ -217,24 +125,22 @@ fn publish() {
// Checks that credential-process is used for `cargo publish`.
let (p, _t) = get_token_test();
- p.cargo("publish --no-verify --registry alternative -Z credential-process")
+ p.cargo("publish --no-verify --registry alternative -Z credential-process -Z registry-auth")
.masquerade_as_nightly_cargo(&["credential-process"])
.with_stderr(
- "\
-[UPDATING] [..]
+ r#"[UPDATING] [..]
+{"v":1,"registry":{"index-url":"[..]","name":"alternative","headers":[..]},"kind":"get","operation":"read","args":[]}
[PACKAGING] foo v0.1.0 [..]
[PACKAGED] [..]
+{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]","args":[]}
[UPLOADING] foo v0.1.0 [..]
[UPLOADED] foo v0.1.0 [..]
note: Waiting [..]
You may press ctrl-c [..]
[PUBLISHED] foo v0.1.0 [..]
-",
+"#,
)
.run();
-
- let calls = read_to_string(p.root().join("runs.log")).unwrap().len();
- assert_eq!(calls, 1);
}
#[cargo_test]
@@ -242,15 +148,8 @@ fn basic_unsupported() {
// Non-action commands don't support login/logout.
let registry = registry::RegistryBuilder::new()
.no_configure_token()
+ .credential_provider(&["cargo:token-from-stdout", "false"])
.build();
- cargo_util::paths::append(
- &paths::home().join(".cargo/config"),
- br#"
- [registry]
- credential-process = "false"
- "#,
- )
- .unwrap();
cargo_process("login -Z credential-process abcdefg")
.replace_crates_io(registry.index_url())
@@ -259,9 +158,10 @@ fn basic_unsupported() {
.with_stderr(
"\
[UPDATING] crates.io index
-[ERROR] credential process `false` cannot be used to log in, \
-the credential-process configuration value must pass the \
-`{action}` argument in the config to support this command
+[ERROR] credential provider `cargo:token-from-stdout false` failed action `login`
+
+Caused by:
+ requested operation not supported
",
)
.run();
@@ -272,9 +172,10 @@ the credential-process configuration value must pass the \
.with_status(101)
.with_stderr(
"\
-[ERROR] credential process `false` cannot be used to log out, \
-the credential-process configuration value must pass the \
-`{action}` argument in the config to support this command
+[ERROR] credential provider `cargo:token-from-stdout false` failed action `logout`
+
+Caused by:
+ requested operation not supported
",
)
.run();
@@ -282,222 +183,475 @@ the credential-process configuration value must pass the \
#[cargo_test]
fn login() {
+ let registry = registry::RegistryBuilder::new()
+ .no_configure_token()
+ .credential_provider(&[
+ &build_provider("test-cred", r#"{"Ok": {"kind": "login"}}"#),
+ "cfg1",
+ "--cfg2",
+ ])
+ .build();
+
+ cargo_process("login -Z credential-process abcdefg -- cmd3 --cmd4")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .replace_crates_io(registry.index_url())
+ .with_stderr(
+ r#"[UPDATING] [..]
+{"v":1,"registry":{"index-url":"https://github.com/rust-lang/crates.io-index","name":"crates-io"},"kind":"login","token":"abcdefg","login-url":"[..]","args":["cfg1","--cfg2","cmd3","--cmd4"]}
+"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn logout() {
let server = registry::RegistryBuilder::new()
.no_configure_token()
+ .credential_provider(&[&build_provider(
+ "test-cred",
+ r#"{"Ok": {"kind": "logout"}}"#,
+ )])
.build();
- // The credential process to use.
+
+ cargo_process("logout -Z credential-process")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .replace_crates_io(server.index_url())
+ .with_stderr(
+ r#"{"v":1,"registry":{"index-url":"https://github.com/rust-lang/crates.io-index","name":"crates-io"},"kind":"logout","args":[]}
+"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn yank() {
+ let (p, _t) = get_token_test();
+
+ p.cargo("yank --version 0.1.0 --registry alternative -Zcredential-process -Zregistry-auth")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_stderr(
+ r#"[UPDATING] [..]
+{"v":1,"registry":{"index-url":"[..]","name":"alternative","headers":[..]},"kind":"get","operation":"read","args":[]}
+{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"yank","name":"foo","vers":"0.1.0","args":[]}
+[YANK] foo@0.1.0
+"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn owner() {
+ let (p, _t) = get_token_test();
+
+ p.cargo("owner --add username --registry alternative -Zcredential-process -Zregistry-auth")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_stderr(
+ r#"[UPDATING] [..]
+{"v":1,"registry":{"index-url":"[..]","name":"alternative","headers":[..]},"kind":"get","operation":"read","args":[]}
+{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"owners","name":"foo","args":[]}
+[OWNER] completed!
+"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_token_output() {
+ // Error when credential process does not output the expected format for a token.
let cred_proj = project()
.at("cred_proj")
.file("Cargo.toml", &basic_manifest("test-cred", "1.0.0"))
+ .file("src/main.rs", r#"fn main() { print!("a\nb\n"); } "#)
+ .build();
+ cred_proj.cargo("build").run();
+ let _server = registry::RegistryBuilder::new()
+ .alternative()
+ .credential_provider(&[
+ "cargo:token-from-stdout",
+ &toml_bin(&cred_proj, "test-cred"),
+ ])
+ .no_configure_token()
+ .build();
+
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("publish --no-verify --registry alternative -Z credential-process")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[UPDATING] [..]
+[ERROR] credential provider `[..]test-cred[EXE]` failed action `get`
+
+Caused by:
+ process `[..]` returned more than one line of output; expected a single token
+",
+ )
+ .run();
+}
+
+/// Builds a credential provider that echoes the request from cargo to stderr,
+/// and prints the `response` to stdout.
+fn build_provider(name: &str, response: &str) -> String {
+ // The credential process to use.
+ let cred_proj = project()
+ .at(name)
+ .file("Cargo.toml", &basic_manifest(name, "1.0.0"))
.file(
"src/main.rs",
- r#"
- use std::io::Read;
-
- fn main() {{
- assert_eq!(std::env::var("CARGO_REGISTRY_NAME_OPT").unwrap(), "crates-io");
- assert_eq!(std::env::var("CARGO_REGISTRY_INDEX_URL").unwrap(), "https://github.com/rust-lang/crates.io-index");
- assert_eq!(std::env::args().skip(1).next().unwrap(), "store");
+ &r####"
+ fn main() {
+ println!(r#"{{"v":[1]}}"#);
+ assert_eq!(std::env::args().skip(1).next().unwrap(), "--cargo-plugin");
let mut buffer = String::new();
- std::io::stdin().read_to_string(&mut buffer).unwrap();
- assert_eq!(buffer, "abcdefg\n");
- std::fs::write("token-store", buffer).unwrap();
- }}
- "#,
+ std::io::stdin().read_line(&mut buffer).unwrap();
+ eprint!("{}", buffer);
+ use std::io::Write;
+ std::io::stdout().write_all(r###"[RESPONSE]"###.as_bytes()).unwrap();
+ println!();
+ } "####
+ .replace("[RESPONSE]", response),
)
.build();
cred_proj.cargo("build").run();
+ toml_bin(&cred_proj, name)
+}
+#[cargo_test]
+fn all_not_found() {
+ let server = registry::RegistryBuilder::new()
+ .no_configure_token()
+ .auth_required()
+ .http_index()
+ .build();
+ let not_found = build_provider("not_found", r#"{"Err": {"kind": "not-found"}}"#);
cargo_util::paths::append(
&paths::home().join(".cargo/config"),
format!(
r#"
[registry]
- credential-process = ["{}", "{{action}}"]
+ global-credential-providers = ["not_found"]
+ [credential-alias]
+ not_found = ["{not_found}"]
"#,
- toml_bin(&cred_proj, "test-cred")
)
.as_bytes(),
)
.unwrap();
- cargo_process("login -Z credential-process abcdefg")
- .masquerade_as_nightly_cargo(&["credential-process"])
+ cargo_process("install -v foo -Zcredential-process -Zregistry-auth")
+ .masquerade_as_nightly_cargo(&["credential-process", "registry-auth"])
.replace_crates_io(server.index_url())
+ .with_status(101)
.with_stderr(
- "\
-[UPDATING] [..]
-[LOGIN] token for `crates.io` saved
-",
+ r#"[UPDATING] [..]
+[CREDENTIAL] [..]not_found[..] get crates-io
+{"v":1,"registry":{"index-url":"[..]","name":"crates-io","headers":[[..]"WWW-Authenticate: Cargo login_url=\"https://test-registry-login/me\""[..]]},"kind":"get","operation":"read","args":[]}
+[ERROR] failed to query replaced source registry `crates-io`
+
+Caused by:
+ no token found, please run `cargo login`
+ or use environment variable CARGO_REGISTRY_TOKEN
+"#,
)
.run();
- assert_eq!(
- fs::read_to_string(paths::root().join("token-store")).unwrap(),
- "abcdefg\n"
- );
}
#[cargo_test]
-fn logout() {
+fn all_not_supported() {
let server = registry::RegistryBuilder::new()
.no_configure_token()
+ .auth_required()
+ .http_index()
.build();
- // The credential process to use.
- let cred_proj = project()
- .at("cred_proj")
- .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0"))
- .file(
- "src/main.rs",
- r#"
- use std::io::Read;
-
- fn main() {{
- assert_eq!(std::env::var("CARGO_REGISTRY_NAME_OPT").unwrap(), "crates-io");
- assert_eq!(std::env::var("CARGO_REGISTRY_INDEX_URL").unwrap(), "https://github.com/rust-lang/crates.io-index");
- assert_eq!(std::env::args().skip(1).next().unwrap(), "erase");
- std::fs::write("token-store", "").unwrap();
- eprintln!("token for `crates-io` has been erased!")
- }}
- "#,
- )
- .build();
- cred_proj.cargo("build").run();
-
+ let not_supported =
+ build_provider("not_supported", r#"{"Err": {"kind": "url-not-supported"}}"#);
cargo_util::paths::append(
&paths::home().join(".cargo/config"),
format!(
r#"
[registry]
- credential-process = ["{}", "{{action}}"]
+ global-credential-providers = ["not_supported"]
+ [credential-alias]
+ not_supported = ["{not_supported}"]
"#,
- toml_bin(&cred_proj, "test-cred")
)
.as_bytes(),
)
.unwrap();
- cargo_process("logout -Z credential-process")
- .masquerade_as_nightly_cargo(&["credential-process"])
+ cargo_process("install -v foo -Zcredential-process -Zregistry-auth")
+ .masquerade_as_nightly_cargo(&["credential-process", "registry-auth"])
.replace_crates_io(server.index_url())
+ .with_status(101)
.with_stderr(
- "\
-token for `crates-io` has been erased!
-[LOGOUT] token for `crates-io` has been removed from local storage
-[NOTE] This does not revoke the token on the registry server.
- If you need to revoke the token, visit <https://crates.io/me> \
- and follow the instructions there.
-",
+ r#"[UPDATING] [..]
+[CREDENTIAL] [..]not_supported[..] get crates-io
+{"v":1,"registry":{"index-url":"[..]","name":"crates-io","headers":[[..]"WWW-Authenticate: Cargo login_url=\"https://test-registry-login/me\""[..]]},"kind":"get","operation":"read","args":[]}
+[ERROR] failed to query replaced source registry `crates-io`
+
+Caused by:
+ no credential providers could handle the request
+"#,
)
.run();
- assert_eq!(
- fs::read_to_string(paths::root().join("token-store")).unwrap(),
- ""
- );
}
#[cargo_test]
-fn yank() {
- let (p, _t) = get_token_test();
+fn multiple_providers() {
+ let server = registry::RegistryBuilder::new()
+ .no_configure_token()
+ .build();
- p.cargo("yank --version 0.1.0 --registry alternative -Z credential-process")
+ // Set up two credential providers: the first will fail with "UrlNotSupported"
+ // and Cargo should skip it. The second should succeed.
+ let url_not_supported = build_provider(
+ "url_not_supported",
+ r#"{"Err": {"kind": "url-not-supported"}}"#,
+ );
+
+ let success_provider = build_provider("success_provider", r#"{"Ok": {"kind": "login"}}"#);
+
+ cargo_util::paths::append(
+ &paths::home().join(".cargo/config"),
+ format!(
+ r#"
+ [registry]
+ global-credential-providers = ["success_provider", "url_not_supported"]
+
+ [credential-alias]
+ success_provider = ["{success_provider}"]
+ url_not_supported = ["{url_not_supported}"]
+ "#,
+ )
+ .as_bytes(),
+ )
+ .unwrap();
+
+ cargo_process("login -Z credential-process -v abcdefg")
.masquerade_as_nightly_cargo(&["credential-process"])
+ .replace_crates_io(server.index_url())
.with_stderr(
- "\
-[UPDATING] [..]
-[YANK] foo@0.1.0
-",
+ r#"[UPDATING] [..]
+[CREDENTIAL] [..]url_not_supported[..] login crates-io
+{"v":1,"registry":{"index-url":"https://github.com/rust-lang/crates.io-index","name":"crates-io"},"kind":"login","token":"abcdefg","login-url":"[..]","args":[]}
+[CREDENTIAL] [..]success_provider[..] login crates-io
+{"v":1,"registry":{"index-url":"https://github.com/rust-lang/crates.io-index","name":"crates-io"},"kind":"login","token":"abcdefg","login-url":"[..]","args":[]}
+"#,
)
.run();
}
#[cargo_test]
-fn owner() {
- let (p, _t) = get_token_test();
+fn both_token_and_provider() {
+ let server = registry::RegistryBuilder::new().build();
+ cargo_util::paths::append(
+ &paths::home().join(".cargo/config"),
+ format!(
+ r#"
+ [registry]
+ credential-provider = ["cargo:token"]
+ "#,
+ )
+ .as_bytes(),
+ )
+ .unwrap();
- p.cargo("owner --add username --registry alternative -Z credential-process")
+ cargo_process("login -Z credential-process -v abcdefg")
.masquerade_as_nightly_cargo(&["credential-process"])
+ .replace_crates_io(server.index_url())
.with_stderr(
- "\
-[UPDATING] [..]
-[OWNER] completed!
-",
+ r#"[UPDATING] [..]
+[WARNING] registry `crates-io` has a token configured in [..]credentials.toml that will be ignored because a credential-provider is configured for this registry`
+[CREDENTIAL] cargo:token login crates-io
+[LOGIN] token for `crates-io` saved
+"#,
)
.run();
+ let credentials =
+ std::fs::read_to_string(paths::home().join(".cargo/credentials.toml")).unwrap();
+ assert_eq!(credentials, "[registry]\ntoken = \"abcdefg\"\n");
}
#[cargo_test]
-fn libexec_path() {
- // cargo: prefixed names use the sysroot
- let server = registry::RegistryBuilder::new()
- .no_configure_token()
- .build();
+fn both_asymmetric_and_token() {
+ let server = registry::RegistryBuilder::new().build();
cargo_util::paths::append(
&paths::home().join(".cargo/config"),
- br#"
- [registry]
- credential-process = "cargo:doesnotexist"
- "#,
+ format!(
+ r#"
+ [registry]
+ token = "foo"
+ secret-key = "bar"
+ "#,
+ )
+ .as_bytes(),
)
.unwrap();
- cargo_process("login -Z credential-process abcdefg")
+ cargo_process("login -Z credential-process -v abcdefg")
.masquerade_as_nightly_cargo(&["credential-process"])
.replace_crates_io(server.index_url())
- .with_status(101)
.with_stderr(
- // FIXME: Update "Caused by" error message once rust/pull/87704 is merged.
- // On Windows, changing to a custom executable resolver has changed the
- // error messages.
- &format!("\
-[UPDATING] [..]
-[ERROR] failed to execute `[..]libexec/cargo-credential-doesnotexist[EXE]` to store authentication token for registry `crates-io`
-
-Caused by:
- [..]
-"),
+ r#"[UPDATING] [..]
+[WARNING] registry `crates-io` has a `secret_key` configured in [..]config that will be ignored because a `token` is also configured, and the `cargo:token` provider is configured with higher precedence
+[CREDENTIAL] cargo:token login crates-io
+[LOGIN] token for `crates-io` saved
+"#,
)
.run();
}
#[cargo_test]
-fn invalid_token_output() {
- // Error when credential process does not output the expected format for a token.
- let _server = registry::RegistryBuilder::new()
- .alternative()
+fn token_caching() {
+ let server = registry::RegistryBuilder::new()
.no_configure_token()
+ .no_configure_registry()
+ .token(cargo_test_support::registry::Token::Plaintext(
+ "sekrit".to_string(),
+ ))
+ .alternative()
+ .http_api()
+ .http_index()
.build();
+
+ // Token should not be re-used if it is expired
+ let expired_provider = build_provider(
+ "test-cred",
+ r#"{"Ok":{"kind":"get","token":"sekrit","cache":{"expires":0},"operation_independent":true}}"#,
+ );
+
+ // Token should not be re-used for a different operation if it is not operation_independent
+ let non_independent_provider = build_provider(
+ "test-cred",
+ r#"{"Ok":{"kind":"get","token":"sekrit","cache":"session","operation_independent":false}}"#,
+ );
+
+ let p = project()
+ .file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [registries.alternative]
+ index = "{}"
+ credential-provider = ["{expired_provider}"]
+ "#,
+ server.index_url(),
+ ),
+ )
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.1.0"
+ description = "foo"
+ license = "MIT"
+ homepage = "https://example.com/"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ let output = r#"[UPDATING] `alternative` index
+{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"read","args":[]}
+[PACKAGING] foo v0.1.0 [..]
+[PACKAGED] [..]
+{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]","args":[]}
+[UPLOADING] foo v0.1.0 [..]
+[UPLOADED] foo v0.1.0 [..]
+note: Waiting [..]
+You may press ctrl-c [..]
+[PUBLISHED] foo v0.1.0 [..]
+"#;
+
+ // The output should contain two JSON messages from the provider in both cases:
+ // The first because the credential is expired, the second because the provider
+ // indicated that the token was non-operation-independent.
+ p.cargo("publish -Z credential-process --registry alternative --no-verify")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_stderr(output)
+ .run();
+
+ p.change_file(
+ ".cargo/config",
+ &format!(
+ r#"
+ [registries.alternative]
+ index = "{}"
+ credential-provider = ["{non_independent_provider}"]
+ "#,
+ server.index_url(),
+ ),
+ );
+
+ p.cargo("publish -Z credential-process --registry alternative --no-verify")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_stderr(output)
+ .run();
+}
+
+#[cargo_test]
+fn basic_provider() {
let cred_proj = project()
.at("cred_proj")
.file("Cargo.toml", &basic_manifest("test-cred", "1.0.0"))
- .file("src/main.rs", r#"fn main() { print!("a\nb\n"); } "#)
+ .file("src/main.rs", r#"fn main() {
+ eprintln!("CARGO={:?}", std::env::var("CARGO").ok());
+ eprintln!("CARGO_REGISTRY_NAME_OPT={:?}", std::env::var("CARGO_REGISTRY_NAME_OPT").ok());
+ eprintln!("CARGO_REGISTRY_INDEX_URL={:?}", std::env::var("CARGO_REGISTRY_INDEX_URL").ok());
+ print!("sekrit");
+ }"#)
.build();
cred_proj.cargo("build").run();
- cargo_util::paths::append(
- &paths::home().join(".cargo/config"),
- format!(
+ let _server = registry::RegistryBuilder::new()
+ .no_configure_token()
+ .credential_provider(&[
+ "cargo:token-from-stdout",
+ &toml_bin(&cred_proj, "test-cred"),
+ ])
+ .token(cargo_test_support::registry::Token::Plaintext(
+ "sekrit".to_string(),
+ ))
+ .alternative()
+ .http_api()
+ .auth_required()
+ .build();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
r#"
- [registry]
- credential-process = ["{}"]
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ [dependencies.bar]
+ version = "0.0.1"
+ registry = "alternative"
"#,
- toml_bin(&cred_proj, "test-cred")
)
- .as_bytes(),
- )
- .unwrap();
-
- let p = project()
- .file("Cargo.toml", &basic_manifest("foo", "1.0.0"))
- .file("src/lib.rs", "")
+ .file("src/main.rs", "fn main() {}")
.build();
+ Package::new("bar", "0.0.1").alternative(true).publish();
- p.cargo("publish --no-verify --registry alternative -Z credential-process")
- .masquerade_as_nightly_cargo(&["credential-process"])
- .with_status(101)
+ p.cargo("check -Z credential-process -Z registry-auth")
+ .masquerade_as_nightly_cargo(&["credential-process", "registry-auth"])
.with_stderr(
"\
-[UPDATING] [..]
-[ERROR] credential process `[..]test-cred[EXE]` returned more than one line of output; expected a single token
+[UPDATING] `alternative` index
+CARGO=Some([..])
+CARGO_REGISTRY_NAME_OPT=Some(\"alternative\")
+CARGO_REGISTRY_INDEX_URL=Some([..])
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.0.1 (registry `alternative`)
+[CHECKING] bar v0.0.1 (registry `alternative`)
+[CHECKING] foo v0.0.1 ([..])
+[FINISHED] [..]
",
)
.run();
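
The credential-process tests above all drive external providers through the same line-oriented JSON protocol: Cargo invokes the provider with `--cargo-plugin`, the provider greets with `{"v":[1]}` on stdout, then answers one JSON request per line (`get`, `login`, `logout`, ...) with an `Ok`/`Err` JSON response. The following standalone sketch mirrors what the `build_provider` helper generates; it is illustrative only (the hard-coded token and the fallback error kind are assumptions for the example, not taken from the real cargo-credential crate).

// Minimal illustrative credential provider (assumed sketch, not the one under test).
use std::io::{self, BufRead, Write};

fn main() {
    // Cargo invokes external providers with `--cargo-plugin` as the first argument.
    assert_eq!(std::env::args().nth(1).as_deref(), Some("--cargo-plugin"));

    // Greeting: advertise the supported protocol version on stdout.
    println!(r#"{{"v":[1]}}"#);

    // Answer one JSON request per line. A real provider would parse the JSON;
    // this sketch only branches on the request `kind`.
    let stdin = io::stdin();
    let mut stdout = io::stdout();
    for line in stdin.lock().lines() {
        let request = line.unwrap();
        let response = if request.contains(r#""kind":"get""#) {
            r#"{"Ok":{"kind":"get","token":"sekrit","cache":"session","operation_independent":false}}"#
        } else if request.contains(r#""kind":"login""#) {
            r#"{"Ok":{"kind":"login"}}"#
        } else if request.contains(r#""kind":"logout""#) {
            r#"{"Ok":{"kind":"logout"}}"#
        } else {
            // Error kinds seen in these tests include "not-found" and "url-not-supported".
            r#"{"Err":{"kind":"not-found"}}"#
        };
        writeln!(stdout, "{response}").unwrap();
    }
}

Responses such as `"cache":"session"` and `"operation_independent":false` are what the `token_caching` test relies on: an expired or operation-dependent token forces Cargo to call the provider again rather than reuse the cached value.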
diff --git a/src/tools/cargo/tests/testsuite/freshness.rs b/src/tools/cargo/tests/testsuite/freshness.rs
index 86b186af8..f28f1ff46 100644
--- a/src/tools/cargo/tests/testsuite/freshness.rs
+++ b/src/tools/cargo/tests/testsuite/freshness.rs
@@ -14,7 +14,8 @@ use super::death;
use cargo_test_support::paths::{self, CargoPathExt};
use cargo_test_support::registry::Package;
use cargo_test_support::{
- basic_manifest, is_coarse_mtime, project, rustc_host, rustc_host_env, sleep_ms,
+ basic_lib_manifest, basic_manifest, is_coarse_mtime, project, rustc_host, rustc_host_env,
+ sleep_ms,
};
#[cargo_test]
@@ -2814,3 +2815,62 @@ directory sources are not [..]
)
.run();
}
+
+#[cargo_test]
+fn skip_mtime_check_in_selected_cargo_home_subdirs() {
+ let p = project()
+ .at("cargo_home/registry/foo")
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .build();
+ let project_root = p.root();
+ let cargo_home = project_root.parent().unwrap().parent().unwrap();
+ p.cargo("check -v")
+ .env("CARGO_HOME", &cargo_home)
+ .with_stderr(
+ "\
+[CHECKING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]
+[FINISHED] dev [..]",
+ )
+ .run();
+ p.change_file("src/lib.rs", "illegal syntax");
+ p.cargo("check -v")
+ .env("CARGO_HOME", &cargo_home)
+ .with_stderr(
+ "\
+[FRESH] foo v0.5.0 ([CWD])
+[FINISHED] dev [..]",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn use_mtime_cache_in_cargo_home() {
+ let p = project()
+ .at("cargo_home/foo")
+ .file("Cargo.toml", &basic_lib_manifest("foo"))
+ .file("src/lib.rs", "")
+ .build();
+ let project_root = p.root();
+ let cargo_home = project_root.parent().unwrap();
+ p.cargo("check -v")
+ .env("CARGO_HOME", &cargo_home)
+ .with_stderr(
+ "\
+[CHECKING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]
+[FINISHED] dev [..]",
+ )
+ .run();
+ p.change_file("src/lib.rs", "illegal syntax");
+ p.cargo("check -v")
+ .env("CARGO_HOME", &cargo_home)
+ .with_stderr(
+ "\
+[DIRTY] foo v0.5.0 ([CWD]): [..]
+[CHECKING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs [..]",
+ )
+ .run_expect_error();
+}
diff --git a/src/tools/cargo/tests/testsuite/lints.rs b/src/tools/cargo/tests/testsuite/lints.rs
index fb31da30a..854de69e9 100644
--- a/src/tools/cargo/tests/testsuite/lints.rs
+++ b/src/tools/cargo/tests/testsuite/lints.rs
@@ -637,3 +637,58 @@ error: unresolved link to `bar`
)
.run();
}
+
+#[cargo_test]
+fn doctest_respects_lints() {
+ let foo = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [lints.rust]
+ confusable-idents = 'allow'
+ "#,
+ )
+ .file(
+ "src/lib.rs",
+ r#"
+/// Test
+///
+/// [`Foo`]
+///
+/// ```
+/// let s = "rust";
+/// let s_s = "rust2";
+/// ```
+pub fn f() {}
+pub const Ě: i32 = 1;
+pub const Ĕ: i32 = 2;
+"#,
+ )
+ .build();
+
+ foo.cargo("check -Zlints")
+ .masquerade_as_nightly_cargo(&["lints"])
+ .with_stderr(
+ "\
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+
+ foo.cargo("test --doc -Zlints")
+ .masquerade_as_nightly_cargo(&["lints"])
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]s
+[DOCTEST] foo
+",
+ )
+ .run();
+}
diff --git a/src/tools/cargo/tests/testsuite/lockfile_compat.rs b/src/tools/cargo/tests/testsuite/lockfile_compat.rs
index 63148cc07..97dcff123 100644
--- a/src/tools/cargo/tests/testsuite/lockfile_compat.rs
+++ b/src/tools/cargo/tests/testsuite/lockfile_compat.rs
@@ -966,3 +966,198 @@ version = "0.0.1"
let lock = p.read_lockfile();
assert_match_exact(lockfile, &lock);
}
+
+fn create_branch(repo: &git2::Repository, branch: &str, head_id: git2::Oid) {
+ repo.branch(branch, &repo.find_commit(head_id).unwrap(), true)
+ .unwrap();
+}
+
+fn create_tag(repo: &git2::Repository, tag: &str, head_id: git2::Oid) {
+ repo.tag(
+ tag,
+ &repo.find_object(head_id, None).unwrap(),
+ &repo.signature().unwrap(),
+ "make a new tag",
+ false,
+ )
+ .unwrap();
+}
+
+fn v3_and_git_url_encoded(ref_kind: &str, f: impl FnOnce(&git2::Repository, &str, git2::Oid)) {
+ let (git_project, repo) = git::new_repo("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file("src/lib.rs", "")
+ });
+ let url = git_project.url();
+ let head_id = repo.head().unwrap().target().unwrap();
+ // Ref name with special characters
+ let git_ref = "a-_+#$)";
+ f(&repo, git_ref, head_id);
+
+ let lockfile = format!(
+ r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "dep1"
+version = "0.5.0"
+source = "git+{url}?{ref_kind}={git_ref}#{head_id}"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "dep1",
+]
+"#,
+ );
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ dep1 = {{ git = '{url}', {ref_kind} = '{git_ref}' }}
+ "#,
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("Cargo.lock", "version = 3")
+ .build();
+
+ p.cargo("check")
+ .with_stderr(format!(
+ "\
+[UPDATING] git repository `{url}`
+[CHECKING] dep1 v0.5.0 ({url}?{ref_kind}={git_ref}#[..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [..]
+"
+ ))
+ .run();
+
+ let lock = p.read_lockfile();
+ assert_match_exact(&lockfile, &lock);
+
+ // v3 doesn't URL-encode URL parameters, but the `url` crate decodes them as if
+ // they were URL-encoded. Therefore Cargo thinks they come from a different
+ // source and clones the repository again.
+ p.cargo("check")
+ .with_stderr(format!(
+ "\
+[UPDATING] git repository `{url}`
+[FINISHED] dev [..]
+"
+ ))
+ .run();
+}
+
+#[cargo_test]
+fn v3_and_git_url_encoded_branch() {
+ v3_and_git_url_encoded("branch", create_branch);
+}
+
+#[cargo_test]
+fn v3_and_git_url_encoded_tag() {
+ v3_and_git_url_encoded("tag", create_tag);
+}
+
+#[cargo_test]
+fn v3_and_git_url_encoded_rev() {
+ v3_and_git_url_encoded("rev", create_tag);
+}
+
+fn v4_and_git_url_encoded(ref_kind: &str, f: impl FnOnce(&git2::Repository, &str, git2::Oid)) {
+ let (git_project, repo) = git::new_repo("dep1", |project| {
+ project
+ .file("Cargo.toml", &basic_lib_manifest("dep1"))
+ .file("src/lib.rs", "")
+ });
+ let url = git_project.url();
+ let head_id = repo.head().unwrap().target().unwrap();
+ // Ref name with special characters
+ let git_ref = "a-_+#$)";
+ let encoded_ref = "a-_%2B%23%24%29";
+ f(&repo, git_ref, head_id);
+
+ let lockfile = format!(
+ r#"# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 4
+
+[[package]]
+name = "dep1"
+version = "0.5.0"
+source = "git+{url}?{ref_kind}={encoded_ref}#{head_id}"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "dep1",
+]
+"#,
+ );
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+
+ [dependencies]
+ dep1 = {{ git = '{url}', {ref_kind} = '{git_ref}' }}
+ "#,
+ ),
+ )
+ .file("src/lib.rs", "")
+ .file("Cargo.lock", "version = 4")
+ .build();
+
+ p.cargo("check -Znext-lockfile-bump")
+ .masquerade_as_nightly_cargo(&["-Znext-lockfile-bump"])
+ .with_stderr(format!(
+ "\
+[UPDATING] git repository `{url}`
+[CHECKING] dep1 v0.5.0 ({url}?{ref_kind}={git_ref}#[..])
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [..]
+"
+ ))
+ .run();
+
+ let lock = p.read_lockfile();
+ assert_match_exact(&lockfile, &lock);
+
+ // Unlike v3_and_git_url_encoded, v4 encodes URL parameters so no git
+ // repository re-clone happens.
+ p.cargo("check -Znext-lockfile-bump")
+ .masquerade_as_nightly_cargo(&["-Znext-lockfile-bump"])
+ .with_stderr("[FINISHED] dev [..]")
+ .run();
+}
+
+#[cargo_test]
+fn v4_and_git_url_encoded_branch() {
+ v4_and_git_url_encoded("branch", create_branch);
+}
+
+#[cargo_test]
+fn v4_and_git_url_encoded_tag() {
+ v4_and_git_url_encoded("tag", create_tag);
+}
+
+#[cargo_test]
+fn v4_and_git_url_encoded_rev() {
+ v4_and_git_url_encoded("rev", create_tag)
+}
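
A worked example of the difference the two helpers above exercise: the ref `a-_+#$)` is written verbatim into the v3 lockfile source URL, but percent-encoded as `a-_%2B%23%24%29` in v4 (`+` → `%2B`, `#` → `%23`, `$` → `%24`, `)` → `%29`). A minimal sketch of that encoding, covering only the characters used in the test (the exact character set Cargo encodes is an assumption here):

// Illustrative percent-encoding of a git ref for a v4 lockfile source URL.
// Only the characters exercised by the test are handled; this is not Cargo's encoder.
fn encode_ref(s: &str) -> String {
    s.bytes()
        .map(|b| match b {
            b'+' | b'#' | b'$' | b')' => format!("%{:02X}", b),
            _ => (b as char).to_string(),
        })
        .collect()
}

fn main() {
    assert_eq!(encode_ref("a-_+#$)"), "a-_%2B%23%24%29");
}

Because v3 stores the raw ref while the `url` crate decodes percent-escapes when parsing, the round trip changes the source ID and triggers the extra update checked above; v4 stores the encoded form, so the second `cargo check` is a no-op.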
diff --git a/src/tools/cargo/tests/testsuite/login.rs b/src/tools/cargo/tests/testsuite/login.rs
index 85b299f28..16bd29dce 100644
--- a/src/tools/cargo/tests/testsuite/login.rs
+++ b/src/tools/cargo/tests/testsuite/login.rs
@@ -109,12 +109,14 @@ fn empty_login_token() {
cargo_process("login")
.replace_crates_io(registry.index_url())
- .with_stdout("please paste the token found on [..]/me below")
.with_stdin("\t\n")
.with_stderr(
"\
[UPDATING] crates.io index
-[ERROR] please provide a non-empty token
+[ERROR] credential provider `cargo:token` failed action `login`
+
+Caused by:
+ please provide a non-empty token
",
)
.with_status(101)
@@ -125,7 +127,10 @@ fn empty_login_token() {
.arg("")
.with_stderr(
"\
-[ERROR] please provide a non-empty token
+[ERROR] credential provider `cargo:token` failed action `login`
+
+Caused by:
+ please provide a non-empty token
",
)
.with_status(101)
@@ -143,7 +148,6 @@ fn invalid_login_token() {
let check = |stdin: &str, stderr: &str, status: i32| {
cargo_process("login")
.replace_crates_io(registry.index_url())
- .with_stdout("please paste the token found on [..]/me below")
.with_stdin(stdin)
.with_stderr(stderr)
.with_status(status)
@@ -153,12 +157,15 @@ fn invalid_login_token() {
let invalid = |stdin: &str| {
check(
stdin,
- "[ERROR] token contains invalid characters.
-Only printable ISO-8859-1 characters are allowed as it is sent in a HTTPS header.",
+ "[ERROR] credential provider `cargo:token` failed action `login`
+
+Caused by:
+ token contains invalid characters.
+ Only printable ISO-8859-1 characters are allowed as it is sent in a HTTPS header.",
101,
)
};
- let valid = |stdin: &str| check(stdin, "[LOGIN] token for `crates.io` saved", 0);
+ let valid = |stdin: &str| check(stdin, "[LOGIN] token for `crates-io` saved", 0);
// Update config.json so that the rest of the tests don't need to care
// whether or not `Updating` is printed.
@@ -166,7 +173,7 @@ Only printable ISO-8859-1 characters are allowed as it is sent in a HTTPS header
"test",
"\
[UPDATING] crates.io index
-[LOGIN] token for `crates.io` saved
+[LOGIN] token for `crates-io` saved
",
0,
);
@@ -184,54 +191,19 @@ Only printable ISO-8859-1 characters are allowed as it is sent in a HTTPS header
#[cargo_test]
fn bad_asymmetric_token_args() {
- // These cases are kept brief as the implementation is covered by clap, so this is only smoke testing that we have clap configured correctly.
- cargo_process("login --key-subject=foo tok")
- .with_stderr_contains(
- "error: the argument '--key-subject <SUBJECT>' cannot be used with '[token]'",
- )
- .with_status(1)
- .run();
-
- cargo_process("login --generate-keypair tok")
- .with_stderr_contains(
- "error: the argument '--generate-keypair' cannot be used with '[token]'",
- )
- .with_status(1)
- .run();
-
- cargo_process("login --secret-key tok")
- .with_stderr_contains("error: the argument '--secret-key' cannot be used with '[token]'")
- .with_status(1)
- .run();
+ let registry = RegistryBuilder::new()
+ .credential_provider(&["cargo:paseto"])
+ .no_configure_token()
+ .build();
- cargo_process("login --generate-keypair --secret-key")
+ // These cases are kept brief as the implementation is covered by clap, so this is only smoke testing that we have clap configured correctly.
+ cargo_process("login -Zcredential-process -- --key-subject")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .replace_crates_io(registry.index_url())
.with_stderr_contains(
- "error: the argument '--generate-keypair' cannot be used with '--secret-key'",
+ " error: a value is required for '--key-subject <SUBJECT>' but none was supplied",
)
- .with_status(1)
- .run();
-}
-
-#[cargo_test]
-fn asymmetric_requires_nightly() {
- let registry = registry::init();
- cargo_process("login --key-subject=foo")
- .replace_crates_io(registry.index_url())
- .with_status(101)
- .with_stderr_contains("[ERROR] the `key-subject` flag is unstable, pass `-Z registry-auth` to enable it\n\
- See https://github.com/rust-lang/cargo/issues/10519 for more information about the `key-subject` flag.")
- .run();
- cargo_process("login --generate-keypair")
- .replace_crates_io(registry.index_url())
.with_status(101)
- .with_stderr_contains("[ERROR] the `generate-keypair` flag is unstable, pass `-Z registry-auth` to enable it\n\
- See https://github.com/rust-lang/cargo/issues/10519 for more information about the `generate-keypair` flag.")
- .run();
- cargo_process("login --secret-key")
- .replace_crates_io(registry.index_url())
- .with_status(101)
- .with_stderr_contains("[ERROR] the `secret-key` flag is unstable, pass `-Z registry-auth` to enable it\n\
- See https://github.com/rust-lang/cargo/issues/10519 for more information about the `secret-key` flag.")
.run();
}
@@ -250,6 +222,28 @@ fn login_with_no_cargo_dir() {
}
#[cargo_test]
+fn login_with_asymmetric_token_and_subject_on_stdin() {
+ let registry = RegistryBuilder::new()
+ .credential_provider(&["cargo:paseto"])
+ .no_configure_token()
+ .build();
+ let credentials = credentials_toml();
+ cargo_process("login -v -Z credential-process -- --key-subject=foo")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .replace_crates_io(registry.index_url())
+ .with_stderr_contains(
+ "\
+k3.public.AmDwjlyf8jAV3gm5Z7Kz9xAOcsKslt_Vwp5v-emjFzBHLCtcANzTaVEghTNEMj9PkQ",
+ )
+ .with_stdin("k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36")
+ .run();
+ let credentials = fs::read_to_string(&credentials).unwrap();
+ assert!(credentials.starts_with("[registry]\n"));
+ assert!(credentials.contains("secret-key-subject = \"foo\"\n"));
+ assert!(credentials.contains("secret-key = \"k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36\"\n"));
+}
+
+#[cargo_test]
fn login_with_differently_sized_token() {
// Verify that the configuration file gets properly truncated.
let registry = registry::init();
@@ -278,7 +272,6 @@ fn login_with_token_on_stdin() {
.run();
cargo_process("login")
.replace_crates_io(registry.index_url())
- .with_stdout("please paste the token found on [..]/me below")
.with_stdin("some token")
.run();
let credentials = fs::read_to_string(&credentials).unwrap();
@@ -286,89 +279,38 @@ fn login_with_token_on_stdin() {
}
#[cargo_test]
-fn login_with_asymmetric_token_and_subject_on_stdin() {
- let registry = registry::init();
- let credentials = credentials_toml();
- fs::remove_file(&credentials).unwrap();
- cargo_process("login --key-subject=foo --secret-key -v -Z registry-auth")
- .masquerade_as_nightly_cargo(&["registry-auth"])
- .replace_crates_io(registry.index_url())
- .with_stdout(
- "\
- please paste the API secret key below
-k3.public.AmDwjlyf8jAV3gm5Z7Kz9xAOcsKslt_Vwp5v-emjFzBHLCtcANzTaVEghTNEMj9PkQ",
- )
- .with_stdin("k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36")
- .run();
- let credentials = fs::read_to_string(&credentials).unwrap();
- assert!(credentials.starts_with("[registry]\n"));
- assert!(credentials.contains("secret-key-subject = \"foo\"\n"));
- assert!(credentials.contains("secret-key = \"k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36\"\n"));
-}
-
-#[cargo_test]
fn login_with_asymmetric_token_on_stdin() {
- let registry = registry::init();
+ let _registry = RegistryBuilder::new()
+ .credential_provider(&["cargo:paseto"])
+ .alternative()
+ .no_configure_token()
+ .build();
let credentials = credentials_toml();
- fs::remove_file(&credentials).unwrap();
- cargo_process("login --secret-key -v -Z registry-auth")
- .masquerade_as_nightly_cargo(&["registry-auth"])
- .replace_crates_io(registry.index_url())
- .with_stdout(
+ cargo_process("login -vZ credential-process --registry alternative")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_stderr(
"\
- please paste the API secret key below
-k3.public.AmDwjlyf8jAV3gm5Z7Kz9xAOcsKslt_Vwp5v-emjFzBHLCtcANzTaVEghTNEMj9PkQ",
- )
- .with_stdin("k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36")
- .run();
- let credentials = fs::read_to_string(&credentials).unwrap();
- assert_eq!(credentials, "[registry]\nsecret-key = \"k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36\"\n");
-}
-
-#[cargo_test]
-fn login_with_asymmetric_key_subject_without_key() {
- let registry = registry::init();
- let credentials = credentials_toml();
- fs::remove_file(&credentials).unwrap();
- cargo_process("login --key-subject=foo -Z registry-auth")
- .masquerade_as_nightly_cargo(&["registry-auth"])
- .replace_crates_io(registry.index_url())
- .with_stderr_contains("error: need a secret_key to set a key_subject")
- .with_status(101)
- .run();
-
- // ok so add a secret_key to the credentials
- cargo_process("login --secret-key -v -Z registry-auth")
- .masquerade_as_nightly_cargo(&["registry-auth"])
- .replace_crates_io(registry.index_url())
- .with_stdout(
- "please paste the API secret key below
+[UPDATING] [..]
+[CREDENTIAL] cargo:paseto login alternative
k3.public.AmDwjlyf8jAV3gm5Z7Kz9xAOcsKslt_Vwp5v-emjFzBHLCtcANzTaVEghTNEMj9PkQ",
)
.with_stdin("k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36")
.run();
-
- // and then it should work
- cargo_process("login --key-subject=foo -Z registry-auth")
- .masquerade_as_nightly_cargo(&["registry-auth"])
- .replace_crates_io(registry.index_url())
- .run();
-
let credentials = fs::read_to_string(&credentials).unwrap();
- assert!(credentials.starts_with("[registry]\n"));
- assert!(credentials.contains("secret-key-subject = \"foo\"\n"));
- assert!(credentials.contains("secret-key = \"k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36\"\n"));
+ assert_eq!(credentials, "[registries.alternative]\nsecret-key = \"k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36\"\n");
}
#[cargo_test]
fn login_with_generate_asymmetric_token() {
- let registry = registry::init();
+ let _registry = RegistryBuilder::new()
+ .credential_provider(&["cargo:paseto"])
+ .alternative()
+ .no_configure_token()
+ .build();
let credentials = credentials_toml();
- fs::remove_file(&credentials).unwrap();
- cargo_process("login --generate-keypair -Z registry-auth")
- .masquerade_as_nightly_cargo(&["registry-auth"])
- .replace_crates_io(registry.index_url())
- .with_stdout("k3.public.[..]")
+ cargo_process("login -Z credential-process --registry alternative")
+ .masquerade_as_nightly_cargo(&["credential-process"])
+ .with_stderr("[UPDATING] `alternative` index\nk3.public.[..]")
.run();
let credentials = fs::read_to_string(&credentials).unwrap();
assert!(credentials.contains("secret-key = \"k3.secret."));
diff --git a/src/tools/cargo/tests/testsuite/main.rs b/src/tools/cargo/tests/testsuite/main.rs
index 2c282c0a3..8279f5818 100644
--- a/src/tools/cargo/tests/testsuite/main.rs
+++ b/src/tools/cargo/tests/testsuite/main.rs
@@ -18,15 +18,50 @@ mod build_script;
mod build_script_env;
mod build_script_extra_link_arg;
mod cache_messages;
+mod cargo;
mod cargo_add;
mod cargo_alias_config;
+mod cargo_bench;
+mod cargo_build;
+mod cargo_check;
+mod cargo_clean;
mod cargo_command;
mod cargo_config;
+mod cargo_doc;
mod cargo_env_config;
mod cargo_features;
+mod cargo_fetch;
+mod cargo_fix;
+mod cargo_generate_lockfile;
+mod cargo_git_checkout;
+mod cargo_help;
+mod cargo_init;
+mod cargo_install;
+mod cargo_locate_project;
+mod cargo_login;
+mod cargo_logout;
+mod cargo_metadata;
mod cargo_new;
+mod cargo_owner;
+mod cargo_package;
+mod cargo_pkgid;
+mod cargo_publish;
+mod cargo_read_manifest;
mod cargo_remove;
+mod cargo_report;
+mod cargo_run;
+mod cargo_rustc;
+mod cargo_rustdoc;
+mod cargo_search;
mod cargo_targets;
+mod cargo_test;
+mod cargo_tree;
+mod cargo_uninstall;
+mod cargo_update;
+mod cargo_vendor;
+mod cargo_verify_project;
+mod cargo_version;
+mod cargo_yank;
mod cfg;
mod check;
mod check_cfg;
@@ -65,7 +100,6 @@ mod glob_targets;
mod help;
mod https;
mod inheritable_workspace_fields;
-mod init;
mod install;
mod install_upgrade;
mod jobserver;
diff --git a/src/tools/cargo/tests/testsuite/owner.rs b/src/tools/cargo/tests/testsuite/owner.rs
index 9fc960c92..7b38bcc5e 100644
--- a/src/tools/cargo/tests/testsuite/owner.rs
+++ b/src/tools/cargo/tests/testsuite/owner.rs
@@ -117,8 +117,8 @@ fn simple_add_with_asymmetric() {
// The http_api server will check that the authorization is correct.
// If the authorization was not sent then we would get an unauthorized error.
p.cargo("owner -a username")
- .arg("-Zregistry-auth")
- .masquerade_as_nightly_cargo(&["registry-auth"])
+ .arg("-Zcredential-process")
+ .masquerade_as_nightly_cargo(&["credential-process"])
.replace_crates_io(registry.index_url())
.with_status(0)
.run();
@@ -184,9 +184,9 @@ fn simple_remove_with_asymmetric() {
// The http_api server will check that the authorization is correct.
// If the authorization was not sent then we would get an unauthorized error.
p.cargo("owner -r username")
- .arg("-Zregistry-auth")
+ .arg("-Zcredential-process")
.replace_crates_io(registry.index_url())
- .masquerade_as_nightly_cargo(&["registry-auth"])
+ .masquerade_as_nightly_cargo(&["credential-process"])
.with_status(0)
.run();
}
diff --git a/src/tools/cargo/tests/testsuite/package.rs b/src/tools/cargo/tests/testsuite/package.rs
index 3b4328242..010523fda 100644
--- a/src/tools/cargo/tests/testsuite/package.rs
+++ b/src/tools/cargo/tests/testsuite/package.rs
@@ -2983,3 +2983,115 @@ src/main.rs.bak
],
);
}
+
+#[cargo_test]
+#[cfg(windows)] // windows is the platform that is most consistently configured for case-insensitive filesystems
+fn normalize_case() {
+ let p = project()
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .file("src/bar.txt", "") // should be ignored when packaging
+ .build();
+ // Workaround `project()` making a `Cargo.toml` on our behalf
+ std::fs::remove_file(p.root().join("Cargo.toml")).unwrap();
+ std::fs::write(
+ p.root().join("cargo.toml"),
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ exclude = ["*.txt"]
+ license = "MIT"
+ description = "foo"
+ "#,
+ )
+ .unwrap();
+
+ p.cargo("package")
+ .with_stderr(
+ "\
+[WARNING] manifest has no documentation[..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 4 files, [..] ([..] compressed)
+",
+ )
+ .run();
+ assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+ p.cargo("package -l")
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+ p.cargo("package").with_stdout("").run();
+
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ &[],
+ );
+}
+
+#[cargo_test]
+#[cfg(target_os = "linux")] // linux is generally configured to be case-sensitive
+fn mixed_case() {
+ let manifest = r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ exclude = ["*.txt"]
+ license = "MIT"
+ description = "foo"
+ "#;
+ let p = project()
+ .file("Cargo.toml", manifest)
+ .file("cargo.toml", manifest)
+ .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+ .file("src/bar.txt", "") // should be ignored when packaging
+ .build();
+
+ p.cargo("package")
+ .with_stderr(
+ "\
+[WARNING] manifest has no documentation[..]
+See [..]
+[PACKAGING] foo v0.0.1 ([CWD])
+[VERIFYING] foo v0.0.1 ([CWD])
+[COMPILING] foo v0.0.1 ([CWD][..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[PACKAGED] 4 files, [..] ([..] compressed)
+",
+ )
+ .run();
+ assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
+ p.cargo("package -l")
+ .with_stdout(
+ "\
+Cargo.lock
+Cargo.toml
+Cargo.toml.orig
+src/main.rs
+",
+ )
+ .run();
+ p.cargo("package").with_stdout("").run();
+
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ validate_crate_contents(
+ f,
+ "foo-0.0.1.crate",
+ &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"],
+ &[],
+ );
+}
diff --git a/src/tools/cargo/tests/testsuite/profile_targets.rs b/src/tools/cargo/tests/testsuite/profile_targets.rs
index a88ca34fd..f2de169b9 100644
--- a/src/tools/cargo/tests/testsuite/profile_targets.rs
+++ b/src/tools/cargo/tests/testsuite/profile_targets.rs
@@ -328,7 +328,7 @@ fn profile_selection_test() {
[RUNNING] `[..]/deps/foo-[..]`
[RUNNING] `[..]/deps/test1-[..]`
[DOCTEST] foo
-[RUNNING] `rustdoc [..]--test [..]
+[RUNNING] `[..] rustdoc [..]--test [..]
").run();
p.cargo("test -vv")
.with_stderr_unordered(
@@ -341,7 +341,7 @@ fn profile_selection_test() {
[RUNNING] `[..]/deps/foo-[..]`
[RUNNING] `[..]/deps/test1-[..]`
[DOCTEST] foo
-[RUNNING] `rustdoc [..]--test [..]
+[RUNNING] `[..] rustdoc [..]--test [..]
",
)
.run();
@@ -395,7 +395,7 @@ fn profile_selection_test_release() {
[RUNNING] `[..]/deps/foo-[..]`
[RUNNING] `[..]/deps/test1-[..]`
[DOCTEST] foo
-[RUNNING] `rustdoc [..]--test [..]`
+[RUNNING] `[..] rustdoc [..]--test [..]`
").run();
p.cargo("test --release -vv")
.with_stderr_unordered(
@@ -408,7 +408,7 @@ fn profile_selection_test_release() {
[RUNNING] `[..]/deps/foo-[..]`
[RUNNING] `[..]/deps/test1-[..]`
[DOCTEST] foo
-[RUNNING] `rustdoc [..]--test [..]
+[RUNNING] `[..] rustdoc [..]--test [..]
",
)
.run();
diff --git a/src/tools/cargo/tests/testsuite/publish.rs b/src/tools/cargo/tests/testsuite/publish.rs
index 45b7c7da5..50ad697d5 100644
--- a/src/tools/cargo/tests/testsuite/publish.rs
+++ b/src/tools/cargo/tests/testsuite/publish.rs
@@ -194,8 +194,8 @@ fn simple_publish_with_asymmetric() {
.file("src/main.rs", "fn main() {}")
.build();
- p.cargo("publish --no-verify -Zregistry-auth --registry dummy-registry")
- .masquerade_as_nightly_cargo(&["registry-auth"])
+ p.cargo("publish --no-verify -Zcredential-process --registry dummy-registry")
+ .masquerade_as_nightly_cargo(&["credential-process"])
.with_stderr(
"\
[UPDATING] `dummy-registry` index
@@ -338,7 +338,7 @@ fn git_deps() {
.file("src/main.rs", "fn main() {}")
.build();
- p.cargo("publish -v --no-verify")
+ p.cargo("publish --no-verify")
.replace_crates_io(registry.index_url())
.with_status(101)
.with_stderr(
@@ -2023,10 +2023,10 @@ fn api_other_error() {
[ERROR] failed to publish to registry at http://127.0.0.1:[..]/
Caused by:
- invalid response from server
+ invalid response body from server
Caused by:
- response body was not valid utf-8
+ invalid utf-8 sequence of [..]
",
)
.run();
diff --git a/src/tools/cargo/tests/testsuite/registry.rs b/src/tools/cargo/tests/testsuite/registry.rs
index bd5e42b45..8982b1cb6 100644
--- a/src/tools/cargo/tests/testsuite/registry.rs
+++ b/src/tools/cargo/tests/testsuite/registry.rs
@@ -3529,3 +3529,38 @@ fn unpack_again_when_cargo_ok_is_unrecognized() {
let ok = fs::read_to_string(&cargo_ok).unwrap();
assert_eq!(&ok, r#"{"v":1}"#);
}
+
+#[cargo_test]
+fn differ_only_by_metadata() {
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ baz = "=0.0.1"
+ "#,
+ )
+ .file("src/main.rs", "fn main() {}")
+ .build();
+
+ Package::new("baz", "0.0.1+b").publish();
+ Package::new("baz", "0.0.1+c").yanked(true).publish();
+
+ p.cargo("check")
+ .with_stderr(
+ "\
+[UPDATING] `dummy-registry` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] [..] v0.0.1+b (registry `dummy-registry`)
+[CHECKING] baz v0.0.1+b
+[CHECKING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+",
+ )
+ .run();
+}
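
The `differ_only_by_metadata` test relies on SemVer build metadata being ignored when matching requirements: both `0.0.1+b` and `0.0.1+c` satisfy `=0.0.1`, and with `+c` yanked the resolver falls back to downloading and checking `0.0.1+b`, as the expected output shows.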
diff --git a/src/tools/cargo/tests/testsuite/registry_auth.rs b/src/tools/cargo/tests/testsuite/registry_auth.rs
index 97cdf6748..4422c638a 100644
--- a/src/tools/cargo/tests/testsuite/registry_auth.rs
+++ b/src/tools/cargo/tests/testsuite/registry_auth.rs
@@ -6,8 +6,9 @@ use cargo_test_support::{project, Execs, Project};
fn cargo(p: &Project, s: &str) -> Execs {
let mut e = p.cargo(s);
- e.masquerade_as_nightly_cargo(&["registry-auth"])
- .arg("-Zregistry-auth");
+ e.masquerade_as_nightly_cargo(&["registry-auth", "credential-process"])
+ .arg("-Zregistry-auth")
+ .arg("-Zcredential-process");
e
}
@@ -149,95 +150,6 @@ fn environment_token_with_asymmetric() {
}
#[cargo_test]
-fn warn_both_asymmetric_and_token() {
- let _server = RegistryBuilder::new()
- .alternative()
- .no_configure_token()
- .build();
- let p = project()
- .file(
- ".cargo/config",
- r#"
- [registries.alternative]
- token = "sekrit"
- secret-key = "k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36"
- "#,
- )
- .file(
- "Cargo.toml",
- r#"
- [package]
- name = "foo"
- version = "0.1.0"
- description = "foo"
- authors = []
- license = "MIT"
- homepage = "https://example.com/"
- "#,
- )
- .file("src/lib.rs", "")
- .build();
-
- p.cargo("publish --no-verify --registry alternative")
- .masquerade_as_nightly_cargo(&["credential-process", "registry-auth"])
- .arg("-Zregistry-auth")
- .with_status(101)
- .with_stderr(
- "\
-[UPDATING] [..]
-[ERROR] both `token` and `secret-key` were specified in the config for registry `alternative`.
-Only one of these values may be set, remove one or the other to proceed.
-",
- )
- .run();
-}
-
-#[cargo_test]
-fn warn_both_asymmetric_and_credential_process() {
- let _server = RegistryBuilder::new()
- .alternative()
- .no_configure_token()
- .build();
- let p = project()
- .file(
- ".cargo/config",
- r#"
- [registries.alternative]
- credential-process = "false"
- secret-key = "k3.secret.fNYVuMvBgOlljt9TDohnaYLblghqaHoQquVZwgR6X12cBFHZLFsaU3q7X3k1Zn36"
- "#,
- )
- .file(
- "Cargo.toml",
- r#"
- [package]
- name = "foo"
- version = "0.1.0"
- description = "foo"
- authors = []
- license = "MIT"
- homepage = "https://example.com/"
- "#,
- )
- .file("src/lib.rs", "")
- .build();
-
- p.cargo("publish --no-verify --registry alternative")
- .masquerade_as_nightly_cargo(&["credential-process", "registry-auth"])
- .arg("-Zcredential-process")
- .arg("-Zregistry-auth")
- .with_status(101)
- .with_stderr(
- "\
-[UPDATING] [..]
-[ERROR] both `credential-process` and `secret-key` were specified in the config for registry `alternative`.
-Only one of these values may be set, remove one or the other to proceed.
-",
- )
- .run();
-}
-
-#[cargo_test]
fn bad_environment_token_with_asymmetric_subject() {
let registry = RegistryBuilder::new()
.alternative()
@@ -463,7 +375,6 @@ fn login() {
let p = make_project();
cargo(&p, "login --registry alternative")
- .with_stdout("please paste the token found on https://test-registry-login/me below")
.with_stdin("sekrit")
.run();
}
@@ -478,7 +389,6 @@ fn login_existing_token() {
let p = make_project();
cargo(&p, "login --registry alternative")
- .with_stdout("please paste the token found on file://[..]/me below")
.with_stdin("sekrit")
.run();
}
diff --git a/src/tools/cargo/tests/testsuite/run.rs b/src/tools/cargo/tests/testsuite/run.rs
index 586502288..64cf4e16c 100644
--- a/src/tools/cargo/tests/testsuite/run.rs
+++ b/src/tools/cargo/tests/testsuite/run.rs
@@ -1,6 +1,8 @@
//! Tests for the `cargo run` command.
-use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, project, Project};
+use cargo_test_support::{
+ basic_bin_manifest, basic_lib_manifest, basic_manifest, project, Project,
+};
use cargo_util::paths::dylib_path_envvar;
#[cargo_test]
@@ -1417,6 +1419,24 @@ fn default_run_workspace() {
}
#[cargo_test]
+fn print_env_verbose() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("a", "0.0.1"))
+ .file("src/main.rs", r#"fn main() {println!("run-a");}"#)
+ .build();
+
+ p.cargo("run -vv")
+ .with_stderr(
+ "\
+[COMPILING] a v0.0.1 ([CWD])
+[RUNNING] `[..]CARGO_MANIFEST_DIR=[CWD][..] rustc --crate-name a[..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]CARGO_MANIFEST_DIR=[CWD][..] target/debug/a[EXE]`",
+ )
+ .run();
+}
+
+#[cargo_test]
#[cfg(target_os = "macos")]
fn run_link_system_path_macos() {
use cargo_test_support::paths::{self, CargoPathExt};
diff --git a/src/tools/cargo/tests/testsuite/script.rs b/src/tools/cargo/tests/testsuite/script.rs
index fcf58de69..0c0441d62 100644
--- a/src/tools/cargo/tests/testsuite/script.rs
+++ b/src/tools/cargo/tests/testsuite/script.rs
@@ -35,7 +35,7 @@ args: []
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] echo v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/echo[EXE]`
@@ -59,7 +59,7 @@ args: []
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] echo v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/echo[EXE]`
@@ -136,7 +136,7 @@ args: []
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] echo v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/echo[EXE]`
@@ -260,7 +260,7 @@ fn main() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/script[EXE]`
@@ -289,7 +289,7 @@ fn main() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/script[EXE]`
@@ -306,7 +306,7 @@ fn main() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/script[EXE]`
",
@@ -323,7 +323,7 @@ fn main() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/script[EXE]`
@@ -435,7 +435,7 @@ fn main() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/script[EXE]`
@@ -460,7 +460,7 @@ args: ["-NotAnArg"]
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/script[EXE] -NotAnArg`
@@ -485,7 +485,7 @@ args: ["-NotAnArg"]
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/script[EXE] -NotAnArg`
@@ -510,7 +510,7 @@ args: ["--help"]
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/script[EXE] --help`
@@ -534,7 +534,7 @@ args: []
"#,
)
.with_stderr(
- r#"[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+ r#"[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] s-h-w-c- v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/s-h-w-c-[EXE]`
@@ -544,6 +544,52 @@ args: []
}
#[cargo_test]
+fn test_name_has_leading_number() {
+ let script = ECHO_SCRIPT;
+ let p = cargo_test_support::project()
+ .file("42answer.rs", script)
+ .build();
+
+ p.cargo("-Zscript -v 42answer.rs")
+ .masquerade_as_nightly_cargo(&["script"])
+ .with_stdout(
+ r#"bin: [..]/debug/answer[EXE]
+args: []
+"#,
+ )
+ .with_stderr(
+ r#"[WARNING] `package.edition` is unspecified, defaulting to `2021`
+[COMPILING] answer v0.0.0 ([ROOT]/foo)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+[RUNNING] `[..]/debug/answer[EXE]`
+"#,
+ )
+ .run();
+}
+
+#[cargo_test]
+fn test_name_is_number() {
+ let script = ECHO_SCRIPT;
+ let p = cargo_test_support::project().file("42.rs", script).build();
+
+ p.cargo("-Zscript -v 42.rs")
+ .masquerade_as_nightly_cargo(&["script"])
+ .with_stdout(
+ r#"bin: [..]/debug/package[EXE]
+args: []
+"#,
+ )
+ .with_stderr(
+ r#"[WARNING] `package.edition` is unspecified, defaulting to `2021`
+[COMPILING] package v0.0.0 ([ROOT]/foo)
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
+[RUNNING] `[..]/debug/package[EXE]`
+"#,
+ )
+ .run();
+}
+
+#[cargo_test]
fn script_like_dir() {
let p = cargo_test_support::project()
.file("script.rs/foo", "something")
@@ -600,7 +646,7 @@ fn main() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[UPDATING] `dummy-registry` index
[DOWNLOADING] crates ...
[DOWNLOADED] script v1.0.0 (registry `dummy-registry`)
@@ -640,7 +686,7 @@ fn main() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] bar v0.0.1 ([ROOT]/foo/bar)
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
@@ -670,7 +716,7 @@ fn main() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/script[EXE] --help`
@@ -699,7 +745,7 @@ fn main() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/script[EXE] --help`
@@ -724,7 +770,7 @@ args: []
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[ROOT]/home/.cargo/target/[..]/debug/script[EXE]`
@@ -752,7 +798,7 @@ args: []
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[ROOT]/home/.cargo/target/[..]/debug/script[EXE]`
@@ -815,7 +861,7 @@ fn cmd_check_with_embedded() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[CHECKING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
",
@@ -876,7 +922,7 @@ fn cmd_build_with_embedded() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
",
@@ -904,7 +950,7 @@ test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; fini
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] test [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] unittests script.rs ([..])
@@ -933,7 +979,7 @@ fn cmd_clean_with_embedded() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
",
)
.run();
@@ -954,7 +1000,7 @@ fn cmd_generate_lockfile_with_embedded() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
",
)
.run();
@@ -1039,7 +1085,7 @@ fn cmd_metadata_with_embedded() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
",
)
.run();
@@ -1095,7 +1141,7 @@ fn cmd_read_manifest_with_embedded() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
",
)
.run();
@@ -1116,7 +1162,7 @@ args: []
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
[COMPILING] script v0.0.0 ([ROOT]/foo)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
[RUNNING] `[..]/debug/script[EXE]`
@@ -1140,7 +1186,7 @@ script v0.0.0 ([ROOT]/foo)
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
",
)
.run();
@@ -1160,7 +1206,7 @@ fn cmd_update_with_embedded() {
)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
",
)
.run();
@@ -1177,7 +1223,61 @@ fn cmd_verify_project_with_embedded() {
.with_json(r#"{"success":"true"}"#)
.with_stderr(
"\
-[WARNING] `package.edition` is unspecifiead, defaulting to `2021`
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cmd_pkgid_with_embedded() {
+ let p = cargo_test_support::project()
+ .file("script.rs", ECHO_SCRIPT)
+ .build();
+
+ p.cargo("-Zscript pkgid --manifest-path script.rs")
+ .masquerade_as_nightly_cargo(&["script"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
+[ERROR] [ROOT]/foo/script.rs is unsupported by `cargo pkgid`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cmd_package_with_embedded() {
+ let p = cargo_test_support::project()
+ .file("script.rs", ECHO_SCRIPT)
+ .build();
+
+ p.cargo("-Zscript package --manifest-path script.rs")
+ .masquerade_as_nightly_cargo(&["script"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
+[ERROR] [ROOT]/foo/script.rs is unsupported by `cargo package`
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn cmd_publish_with_embedded() {
+ let p = cargo_test_support::project()
+ .file("script.rs", ECHO_SCRIPT)
+ .build();
+
+ p.cargo("-Zscript publish --manifest-path script.rs")
+ .masquerade_as_nightly_cargo(&["script"])
+ .with_status(101)
+ .with_stderr(
+ "\
+[WARNING] `package.edition` is unspecified, defaulting to `2021`
+[ERROR] [ROOT]/foo/script.rs is unsupported by `cargo publish`
",
)
.run();
diff --git a/src/tools/cargo/tests/testsuite/test.rs b/src/tools/cargo/tests/testsuite/test.rs
index 6a062cfb6..c6ae4ce61 100644
--- a/src/tools/cargo/tests/testsuite/test.rs
+++ b/src/tools/cargo/tests/testsuite/test.rs
@@ -389,10 +389,10 @@ failures:
---- test_hello stdout ----
[..]thread '[..]' panicked at [..]",
)
- .with_stdout_contains("[..]assertion failed[..]")
- .with_stdout_contains("[..]`(left == right)`[..]")
- .with_stdout_contains("[..]left: `\"hello\"`,[..]")
- .with_stdout_contains("[..]right: `\"nope\"`[..]")
+ .with_stdout_contains("[..]assertion [..]failed[..]")
+ .with_stdout_contains("[..]left == right[..]")
+ .with_stdout_contains("[..]left: [..]\"hello\"[..]")
+ .with_stdout_contains("[..]right: [..]\"nope\"[..]")
.with_stdout_contains("[..]src/main.rs:12[..]")
.with_stdout_contains(
"\
@@ -4794,6 +4794,21 @@ error: test failed, to rerun pass `--test t2`
Caused by:
process didn't exit successfully: `[ROOT]/foo/target/debug/deps/t2[..]` (exit [..]: 4)
+note: test exited abnormally; to see the full output pass --nocapture to the harness.
+",
+ )
+ .with_status(4)
+ .run();
+
+ p.cargo("test --test t2 -- --nocapture")
+ .with_stderr(
+ "\
+[FINISHED] test [..]
+[RUNNING] tests/t2.rs (target/debug/deps/t2[..])
+error: test failed, to rerun pass `--test t2`
+
+Caused by:
+ process didn't exit successfully: `[ROOT]/foo/target/debug/deps/t2[..]` (exit [..]: 4)
",
)
.with_status(4)
@@ -4811,6 +4826,7 @@ error: test failed, to rerun pass `--test t2`
Caused by:
process didn't exit successfully: `[ROOT]/foo/target/debug/deps/t2[..]` (exit [..]: 4)
+note: test exited abnormally; to see the full output pass --nocapture to the harness.
error: 2 targets failed:
`--test t1`
`--test t2`
@@ -4818,4 +4834,51 @@ error: 2 targets failed:
)
.with_status(101)
.run();
+
+ p.cargo("test --no-fail-fast -- --nocapture")
+ .with_stderr_does_not_contain("test exited abnormally; to see the full output pass --nocapture to the harness.")
+ .with_stderr_contains("[..]thread 't' panicked [..] tests/t1[..]")
+ .with_stderr_contains("note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace")
+ .with_stderr_contains("[..]process didn't exit successfully: `[ROOT]/foo/target/debug/deps/t2[..]` (exit [..]: 4)")
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn cargo_test_no_keep_going() {
+ let p = project()
+ .file("Cargo.toml", &basic_bin_manifest("foo"))
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("test --keep-going")
+ .with_stderr(
+ "\
+error: unexpected argument `--keep-going` found
+
+ tip: to run as many tests as possible without failing fast, use `--no-fail-fast`",
+ )
+ .with_status(101)
+ .run();
+}
+
+#[cargo_test]
+fn cargo_test_print_env_verbose() {
+ let p = project()
+ .file("Cargo.toml", &basic_manifest("foo", "0.0.1"))
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("test -vv")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `[..]CARGO_MANIFEST_DIR=[CWD][..] rustc --crate-name foo[..]`
+[RUNNING] `[..]CARGO_MANIFEST_DIR=[CWD][..] rustc --crate-name foo[..]`
+[FINISHED] test [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `[..]CARGO_MANIFEST_DIR=[CWD][..] [CWD]/target/debug/deps/foo-[..][EXE]`
+[DOCTEST] foo
+[RUNNING] `[..]CARGO_MANIFEST_DIR=[CWD][..] rustdoc --crate-type lib --crate-name foo[..]",
+ )
+ .run();
}
diff --git a/src/tools/cargo/tests/testsuite/update.rs b/src/tools/cargo/tests/testsuite/update.rs
index 1d3ee05b7..d42345355 100644
--- a/src/tools/cargo/tests/testsuite/update.rs
+++ b/src/tools/cargo/tests/testsuite/update.rs
@@ -464,6 +464,46 @@ fn update_aggressive() {
.run();
}
+#[cargo_test]
+fn update_aggressive_conflicts_with_precise() {
+ Package::new("log", "0.1.0").publish();
+ Package::new("serde", "0.2.1").dep("log", "0.1").publish();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ serde = "0.2"
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ p.cargo("check").run();
+
+ Package::new("log", "0.1.1").publish();
+ Package::new("serde", "0.2.2").dep("log", "0.1").publish();
+
+ p.cargo("update -p serde:0.2.1 --precise 0.2.2 --aggressive")
+ .with_status(1)
+ .with_stderr(
+ "\
+error: the argument '--precise <PRECISE>' cannot be used with '--aggressive'
+
+Usage: cargo[EXE] update --package [<SPEC>] --precise <PRECISE>
+
+For more information, try '--help'.
+",
+ )
+ .run();
+}
+
// cargo update should respect its arguments even without a lockfile.
// See issue "Running cargo update without a Cargo.lock ignores arguments"
// at <https://github.com/rust-lang/cargo/issues/6872>.
diff --git a/src/tools/cargo/tests/testsuite/yank.rs b/src/tools/cargo/tests/testsuite/yank.rs
index 684a04508..c0bd24776 100644
--- a/src/tools/cargo/tests/testsuite/yank.rs
+++ b/src/tools/cargo/tests/testsuite/yank.rs
@@ -76,14 +76,14 @@ fn explicit_version_with_asymmetric() {
// The http_api server will check that the authorization is correct.
// If the authorization was not sent then we would get an unauthorized error.
p.cargo("yank --version 0.0.1")
- .arg("-Zregistry-auth")
- .masquerade_as_nightly_cargo(&["registry-auth"])
+ .arg("-Zcredential-process")
+ .masquerade_as_nightly_cargo(&["credential-process"])
.replace_crates_io(registry.index_url())
.run();
p.cargo("yank --undo --version 0.0.1")
- .arg("-Zregistry-auth")
- .masquerade_as_nightly_cargo(&["registry-auth"])
+ .arg("-Zcredential-process")
+ .masquerade_as_nightly_cargo(&["credential-process"])
.replace_crates_io(registry.index_url())
.run();
}
diff --git a/src/tools/clippy/.github/workflows/clippy.yml b/src/tools/clippy/.github/workflows/clippy.yml
index c582c28cd..410ff53a2 100644
--- a/src/tools/clippy/.github/workflows/clippy.yml
+++ b/src/tools/clippy/.github/workflows/clippy.yml
@@ -50,7 +50,7 @@ jobs:
echo "LD_LIBRARY_PATH=${SYSROOT}/lib${LD_LIBRARY_PATH+:${LD_LIBRARY_PATH}}" >> $GITHUB_ENV
- name: Build
- run: cargo build --features deny-warnings,internal
+ run: cargo build --tests --features deny-warnings,internal
- name: Test
run: cargo test --features deny-warnings,internal
diff --git a/src/tools/clippy/.github/workflows/clippy_bors.yml b/src/tools/clippy/.github/workflows/clippy_bors.yml
index d5ab313ba..5c69714bc 100644
--- a/src/tools/clippy/.github/workflows/clippy_bors.yml
+++ b/src/tools/clippy/.github/workflows/clippy_bors.yml
@@ -106,7 +106,7 @@ jobs:
echo "$SYSROOT/bin" >> $GITHUB_PATH
- name: Build
- run: cargo build --features deny-warnings,internal
+ run: cargo build --tests --features deny-warnings,internal
- name: Test
if: runner.os == 'Linux'
@@ -187,16 +187,14 @@ jobs:
- name: Extract Binaries
run: |
DIR=$CARGO_TARGET_DIR/debug
- rm $DIR/deps/integration-*.d
- mv $DIR/deps/integration-* $DIR/integration
+ find $DIR/deps/integration-* -executable ! -type d | xargs -I {} mv {} $DIR/integration
find $DIR ! -executable -o -type d ! -path $DIR | xargs rm -rf
- rm -rf $CARGO_TARGET_DIR/release
- name: Upload Binaries
- uses: actions/upload-artifact@v1
+ uses: actions/upload-artifact@v3
with:
- name: target
- path: target
+ name: binaries
+ path: target/debug
integration:
needs: integration_build
@@ -206,22 +204,20 @@ jobs:
matrix:
integration:
- 'rust-lang/cargo'
- # FIXME: re-enable once fmt_macros is renamed in RLS
- # - 'rust-lang/rls'
- 'rust-lang/chalk'
- 'rust-lang/rustfmt'
- 'Marwes/combine'
- 'Geal/nom'
- 'rust-lang/stdarch'
- 'serde-rs/serde'
- # FIXME: chrono currently cannot be compiled with `--all-targets`
- # - 'chronotope/chrono'
+ - 'chronotope/chrono'
- 'hyperium/hyper'
- 'rust-random/rand'
- 'rust-lang/futures-rs'
- 'rust-itertools/itertools'
- 'rust-lang-nursery/failure'
- 'rust-lang/log'
+ - 'matthiaskrgr/clippy_ci_panic_test'
runs-on: ubuntu-latest
@@ -237,12 +233,17 @@ jobs:
- name: Install toolchain
run: rustup show active-toolchain
+ - name: Set LD_LIBRARY_PATH
+ run: |
+ SYSROOT=$(rustc --print sysroot)
+ echo "LD_LIBRARY_PATH=${SYSROOT}/lib${LD_LIBRARY_PATH+:${LD_LIBRARY_PATH}}" >> $GITHUB_ENV
+
# Download
- name: Download target dir
- uses: actions/download-artifact@v1
+ uses: actions/download-artifact@v3
with:
- name: target
- path: target
+ name: binaries
+ path: target/debug
- name: Make Binaries Executable
run: chmod +x $CARGO_TARGET_DIR/debug/*
@@ -251,7 +252,7 @@ jobs:
- name: Test ${{ matrix.integration }}
run: |
RUSTUP_TOOLCHAIN="$(rustup show active-toolchain | grep -o -E "nightly-[0-9]{4}-[0-9]{2}-[0-9]{2}")" \
- $CARGO_TARGET_DIR/debug/integration
+ $CARGO_TARGET_DIR/debug/integration --show-output
env:
INTEGRATION: ${{ matrix.integration }}
diff --git a/src/tools/clippy/CHANGELOG.md b/src/tools/clippy/CHANGELOG.md
index 14d822083..71671273c 100644
--- a/src/tools/clippy/CHANGELOG.md
+++ b/src/tools/clippy/CHANGELOG.md
@@ -6,13 +6,74 @@ document.
## Unreleased / Beta / In Rust Nightly
-[83e42a23...master](https://github.com/rust-lang/rust-clippy/compare/83e42a23...master)
+[435a8ad8...master](https://github.com/rust-lang/rust-clippy/compare/435a8ad8...master)
+
+## Rust 1.71
+
+Current stable, released 2023-07-13
+
+<!-- FIXME: Remove the request for feedback, with the next changelog -->
+
+We're trying out a new, shorter changelog format that only contains significant changes.
+You can check out the list of merged pull requests for a full list of changes.
+If you have any feedback related to the new format, please share it in
+[#10847](https://github.com/rust-lang/rust-clippy/issues/10847)
+
+[View all 78 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2023-04-11T20%3A05%3A26Z..2023-05-20T13%3A48%3A17Z+base%3Amaster)
+
+### New Lints
+
+* [`non_minimal_cfg`]
+ [#10763](https://github.com/rust-lang/rust-clippy/pull/10763)
+* [`manual_next_back`]
+ [#10769](https://github.com/rust-lang/rust-clippy/pull/10769)
+* [`ref_patterns`]
+ [#10736](https://github.com/rust-lang/rust-clippy/pull/10736)
+* [`default_constructed_unit_structs`]
+ [#10716](https://github.com/rust-lang/rust-clippy/pull/10716)
+* [`manual_while_let_some`]
+ [#10647](https://github.com/rust-lang/rust-clippy/pull/10647)
+* [`needless_bool_assign`]
+ [#10432](https://github.com/rust-lang/rust-clippy/pull/10432)
+* [`items_after_test_module`]
+ [#10578](https://github.com/rust-lang/rust-clippy/pull/10578)
+
+### Moves and Deprecations
+
+* Rename `integer_arithmetic` to `arithmetic_side_effects`
+ [#10674](https://github.com/rust-lang/rust-clippy/pull/10674)
+* Moved [`redundant_clone`] to `nursery` (Now allow-by-default)
+ [#10873](https://github.com/rust-lang/rust-clippy/pull/10873)
+
+### Enhancements
+
+* [`invalid_regex`]: Now supports the new syntax introduced after regex v1.8.0
+ [#10682](https://github.com/rust-lang/rust-clippy/pull/10682)
+* [`semicolon_outside_block`]: Added [`semicolon-outside-block-ignore-multiline`] as a new config value.
+ [#10656](https://github.com/rust-lang/rust-clippy/pull/10656)
+* [`semicolon_inside_block`]: Added [`semicolon-inside-block-ignore-singleline`] as a new config value.
+ [#10656](https://github.com/rust-lang/rust-clippy/pull/10656)
+* [`unnecessary_box_returns`]: Added [`unnecessary-box-size`] as a new config value to set the maximum
+ size of `T` in `Box<T>` to be linted.
+ [#10651](https://github.com/rust-lang/rust-clippy/pull/10651)
+
+### Documentation Improvements
+
+* `cargo clippy --explain LINT` now shows possible configuration options for the explained lint
+ [#10751](https://github.com/rust-lang/rust-clippy/pull/10751)
+* New config values mentioned in this changelog will now be linked.
+ [#10889](https://github.com/rust-lang/rust-clippy/pull/10889)
+* Several sections of [Clippy's book] have been reworked
+ [#10652](https://github.com/rust-lang/rust-clippy/pull/10652)
+ [#10622](https://github.com/rust-lang/rust-clippy/pull/10622)
+
+[Clippy's book]: https://doc.rust-lang.org/clippy/
## Rust 1.70
-Current stable, released 2023-06-01
+Released 2023-06-01
-[**View 85 PRs merged since 1.69**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2023-04-20..2023-06-01+base%3Amaster+sort%3Amerged-desc+)
+[View all 91 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2023-02-26T01%3A05%3A43Z..2023-04-11T13%3A27%3A30Z+base%3Amaster)
### New Lints
@@ -137,7 +198,7 @@ Current stable, released 2023-06-01
Released 2023-04-20
-[**View 86 PRs merged since 1.68**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2023-03-09..2023-04-20+base%3Amaster+sort%3Amerged-desc+)
+[View all 72 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2023-01-13T06%3A12%3A46Z..2023-02-25T23%3A48%3A10Z+base%3Amaster)
### New Lints
@@ -252,7 +313,7 @@ Released 2023-04-20
Released 2023-03-09
-[**View 85 PRs merged since 1.67**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2023-01-26..2023-03-09+base%3Amaster+sort%3Amerged-desc+)
+[View all 76 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-12-01T20%3A40%3A04Z..2023-01-12T18%3A58%3A59Z+base%3Amaster)
### New Lints
@@ -399,7 +460,7 @@ Released 2023-03-09
Released 2023-01-26
-[**View 68 PRs merged since 1.66**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-12-15..2023-01-26+base%3Amaster+sort%3Amerged-desc+)
+[View all 104 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-10-23T13%3A35%3A19Z..2022-12-01T13%3A34%3A39Z+base%3Amaster)
### New Lints
@@ -590,8 +651,7 @@ Released 2023-01-26
Released 2022-12-15
-[**View 93 PRs merged since 1.65**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-11-03..2022-12-15+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 116 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-09-09T17%3A32%3A39Z..2022-10-23T11%3A27%3A24Z+base%3Amaster)
### New Lints
@@ -762,8 +822,7 @@ Released 2022-12-15
Released 2022-11-03
-[**View 129 PRs merged since 1.64**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-09-22..2022-11-03+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 86 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-07-29T01%3A09%3A31Z..2022-09-09T00%3A01%3A54Z+base%3Amaster)
### Important Changes
@@ -907,8 +966,7 @@ Released 2022-11-03
Released 2022-09-22
-[**View 92 PRs merged since 1.63**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-08-11..2022-09-22+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 110 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-06-17T21%3A25%3A31Z..2022-07-28T17%3A11%3A18Z+base%3Amaster)
### New Lints
@@ -1058,8 +1116,7 @@ Released 2022-09-22
Released 2022-08-11
-[**View 100 PRs merged since 1.62**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-06-30..2022-08-11+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 91 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-05-05T17%3A24%3A22Z..2022-06-16T14%3A24%3A48Z+base%3Amaster)
### New Lints
@@ -1205,8 +1262,7 @@ Released 2022-08-11
Released 2022-06-30
-[**View 104 PRs merged since 1.61**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-05-19..2022-06-30+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 90 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-03-25T17%3A22%3A30Z..2022-05-05T13%3A29%3A44Z+base%3Amaster)
### New Lints
@@ -1363,8 +1419,7 @@ Released 2022-06-30
Released 2022-05-19
-[**View 93 PRs merged since 1.60**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-04-07..2022-05-19+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 60 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-02-11T16%3A54%3A41Z..2022-03-24T13%3A42%3A25Z+base%3Amaster)
### New Lints
@@ -1465,8 +1520,7 @@ Released 2022-05-19
Released 2022-04-07
-[**View 75 PRs merged since 1.59**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-02-24..2022-04-07+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 73 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-12-31T17%3A53%3A37Z..2022-02-10T17%3A31%3A37Z+base%3Amaster)
### New Lints
@@ -1598,8 +1652,7 @@ Released 2022-04-07
Released 2022-02-24
-[**View 63 PRs merged since 1.58**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-01-13..2022-02-24+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 94 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-11-04T12%3A40%3A18Z..2021-12-30T13%3A36%3A20Z+base%3Amaster)
### New Lints
@@ -1763,8 +1816,7 @@ Released 2022-02-24
Released 2022-01-13
-[**View 73 PRs merged since 1.57**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-12-02..2022-01-13+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 68 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-10-07T09%3A49%3A18Z..2021-11-04T12%3A20%3A12Z+base%3Amaster)
### Rust 1.58.1
@@ -1885,8 +1937,7 @@ Released 2022-01-13
Released 2021-12-02
-[**View 92 PRs merged since 1.56**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-10-21..2021-12-02+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 148 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-08-12T20%3A36%3A04Z..2021-11-03T17%3A57%3A59Z+base%3Amaster)
### New Lints
@@ -2037,7 +2088,7 @@ Released 2021-12-02
Released 2021-10-21
-[**View 92 PRs merged since 1.55**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-09-09..2021-10-21+base%3Amaster+sort%3Amerged-desc+)
+[View all 38 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-07-19T14%3A33%3A33Z..2021-08-12T09%3A28%3A38Z+base%3Amaster)
### New Lints
@@ -2103,7 +2154,7 @@ Released 2021-10-21
Released 2021-09-09
-[**View 61 PRs merged since 1.54**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-07-29..2021-09-09+base%3Amaster+sort%3Amerged-desc+)
+[View all 83 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-06-03T07%3A23%3A59Z..2021-07-29T11%3A47%3A32Z+base%3Amaster)
### Important Changes
@@ -2221,8 +2272,7 @@ Released 2021-09-09
Released 2021-07-29
-[**View 68 PRs merged since 1.53**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-06-17..2021-07-29+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 74 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-04-27T23%3A51%3A18Z..2021-06-03T06%3A54%3A07Z+base%3Amaster)
### New Lints
@@ -2350,7 +2400,7 @@ Released 2021-07-29
Released 2021-06-17
-[**View 80 PRs merged since 1.52**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-05-06..2021-06-17+base%3Amaster+sort%3Amerged-desc+)
+[View all 126 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-03-12T22%3A49%3A20Z..2021-04-27T14%3A38%3A20Z+base%3Amaster)
### New Lints
@@ -2534,8 +2584,7 @@ Released 2021-06-17
Released 2021-05-06
-[**View 113 PRs merged since 1.51**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-03-25..2021-05-06+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 102 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-02-03T15%3A59%3A06Z..2021-03-11T20%3A06%3A43Z+base%3Amaster)
### New Lints
@@ -2670,8 +2719,7 @@ Released 2021-05-06
Released 2021-03-25
-[**View 117 PRs merged since 1.50**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-02-11..2021-03-25+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 78 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-12-21T15%3A43%3A04Z..2021-02-03T04%3A21%3A10Z+base%3Amaster)
### New Lints
@@ -2786,8 +2834,7 @@ Released 2021-03-25
Released 2021-02-11
-[**View 90 PRs merged since 1.49**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-12-31..2021-02-11+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 119 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-11-06T18%3A32%3A40Z..2021-01-03T14%3A51%3A18Z+base%3Amaster)
### New Lints
@@ -2916,8 +2963,7 @@ Released 2021-02-11
Released 2020-12-31
-[**View 85 PRs merged since 1.48**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-11-19..2020-12-31+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 107 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-09-24T14%3A05%3A12Z..2020-11-05T13%3A35%3A44Z+base%3Amaster)
### New Lints
@@ -3023,7 +3069,7 @@ Released 2020-12-31
Released 2020-11-19
-[**View 112 PRs merged since 1.47**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-10-08..2020-11-19+base%3Amaster+sort%3Amerged-desc+)
+[View all 99 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-08-11T13%3A14%3A38Z..2020-09-23T18%3A55%3A22Z+base%3Amaster)
### New lints
@@ -3141,8 +3187,7 @@ Released 2020-11-19
Released 2020-10-08
-[**View 80 PRs merged since 1.46**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-08-27..2020-10-08+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 76 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-06-23T16%3A27%3A11Z..2020-08-11T12%3A52%3A41Z+base%3Amaster)
### New lints
@@ -3244,8 +3289,7 @@ Released 2020-10-08
Released 2020-08-27
-[**View 93 PRs merged since 1.45**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-07-16..2020-08-27+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 48 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-05-31T12%3A50%3A53Z..2020-06-23T15%3A00%3A32Z+base%3Amaster)
### New lints
@@ -3307,8 +3351,7 @@ Released 2020-08-27
Released 2020-07-16
-[**View 65 PRs merged since 1.44**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-06-04..2020-07-16+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 81 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-04-18T20%3A18%3A04Z..2020-05-27T19%3A25%3A04Z+base%3Amaster)
### New lints
@@ -3385,8 +3428,7 @@ and [`similar_names`]. [#5651](https://github.com/rust-lang/rust-clippy/pull/565
Released 2020-06-04
-[**View 88 PRs merged since 1.43**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-04-23..2020-06-04+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 124 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-03-05T17%3A30%3A53Z..2020-04-18T09%3A20%3A51Z+base%3Amaster)
### New lints
@@ -3469,8 +3511,7 @@ Released 2020-06-04
Released 2020-04-23
-[**View 121 PRs merged since 1.42**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-03-12..2020-04-23+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 91 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-01-26T16%3A01%3A11Z..2020-03-04T16%3A45%3A37Z+base%3Amaster)
### New lints
@@ -3528,7 +3569,7 @@ Released 2020-04-23
Released 2020-03-12
-[**View 106 PRs merged since 1.41**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-01-30..2020-03-12+base%3Amaster+sort%3Amerged-desc+)
+[View all 101 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-12-15T01%3A40%3A34Z..2020-01-26T11%3A22%3A13Z+base%3Amaster)
### New lints
@@ -3595,7 +3636,7 @@ Released 2020-03-12
Released 2020-01-30
-[**View 107 PRs merged since 1.40**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-12-19..2020-01-30+base%3Amaster+sort%3Amerged-desc+)
+[View all 74 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-10-28T20%3A50%3A24Z..2019-12-12T00%3A53%3A03Z+base%3Amaster)
* New Lints:
* [`exit`] [#4697](https://github.com/rust-lang/rust-clippy/pull/4697)
@@ -3640,8 +3681,7 @@ Released 2020-01-30
Released 2019-12-19
-[**View 69 😺 PRs merged since 1.39**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-11-07..2019-12-19+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 76 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-09-23T06%3A18%3A04Z..2019-10-28T17%3A34%3A55Z+base%3Amaster)
* New Lints:
* [`unneeded_wildcard_pattern`] [#4537](https://github.com/rust-lang/rust-clippy/pull/4537)
@@ -3683,7 +3723,7 @@ Released 2019-12-19
Released 2019-11-07
-[**View 84 PRs merged since 1.38**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-09-26..2019-11-07+base%3Amaster+sort%3Amerged-desc+)
+[View all 100 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-08-11T19%3A21%3A38Z..2019-09-22T12%3A07%3A39Z+base%3Amaster)
* New Lints:
* [`uninit_assumed_init`] [#4479](https://github.com/rust-lang/rust-clippy/pull/4479)
@@ -3727,7 +3767,7 @@ Released 2019-11-07
Released 2019-09-26
-[**View 102 PRs merged since 1.37**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-08-15..2019-09-26+base%3Amaster+sort%3Amerged-desc+)
+[View all 76 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-06-30T13%3A40%3A26Z..2019-08-11T09%3A47%3A27Z+base%3Amaster)
* New Lints:
* [`main_recursion`] [#4203](https://github.com/rust-lang/rust-clippy/pull/4203)
@@ -3757,7 +3797,7 @@ Released 2019-09-26
Released 2019-08-15
-[**View 83 PRs merged since 1.36**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-07-04..2019-08-15+base%3Amaster+sort%3Amerged-desc+)
+[View all 72 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-05-19T08%3A11%3A23Z..2019-06-25T23%3A22%3A22Z+base%3Amaster)
* New Lints:
* [`checked_conversions`] [#4088](https://github.com/rust-lang/rust-clippy/pull/4088)
@@ -3781,8 +3821,7 @@ Released 2019-08-15
Released 2019-07-04
-[**View 75 PRs merged since 1.35**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-05-20..2019-07-04+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 81 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-04-10T09%3A41%3A56Z..2019-05-18T00%3A29%3A40Z+base%3Amaster)
* New lints: [`find_map`], [`filter_map_next`] [#4039](https://github.com/rust-lang/rust-clippy/pull/4039)
* New lint: [`path_buf_push_overwrite`] [#3954](https://github.com/rust-lang/rust-clippy/pull/3954)
@@ -3813,8 +3852,7 @@ Released 2019-07-04
Released 2019-05-20
-[**View 90 PRs merged since 1.34**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-04-10..2019-05-20+base%3Amaster+sort%3Amerged-desc+)
-
+[1fac380..37f5c1e](https://github.com/rust-lang/rust-clippy/compare/1fac380...37f5c1e)
* New lint: `drop_bounds` to detect `T: Drop` bounds
* Split [`redundant_closure`] into [`redundant_closure`] and [`redundant_closure_for_method_calls`] [#4110](https://github.com/rust-lang/rust-clippy/pull/4101)
@@ -3842,8 +3880,7 @@ Released 2019-05-20
Released 2019-04-10
-[**View 66 PRs merged since 1.33**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-02-26..2019-04-10+base%3Amaster+sort%3Amerged-desc+)
-
+[View all 61 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-01-17T17%3A45%3A39Z..2019-02-19T08%3A24%3A05Z+base%3Amaster)
* New lint: [`assertions_on_constants`] to detect for example `assert!(true)`
* New lint: [`dbg_macro`] to detect uses of the `dbg!` macro
@@ -3873,7 +3910,7 @@ Released 2019-04-10
Released 2019-02-26
-[**View 83 PRs merged since 1.32**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-01-17..2019-02-26+base%3Amaster+sort%3Amerged-desc+)
+[View all 120 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2018-11-28T06%3A19%3A50Z..2019-01-15T09%3A27%3A02Z+base%3Amaster)
* New lints: [`implicit_return`], [`vec_box`], [`cast_ref_to_mut`]
* The `rust-clippy` repository is now part of the `rust-lang` org.
@@ -3906,7 +3943,7 @@ Released 2019-02-26
Released 2019-01-17
-[**View 106 PRs merged since 1.31**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2018-12-06..2019-01-17+base%3Amaster+sort%3Amerged-desc+)
+[View all 71 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2018-10-24T05%3A02%3A21Z..2018-11-27T17%3A29%3A34Z+base%3Amaster)
* New lints: [`slow_vector_initialization`], `mem_discriminant_non_enum`,
[`redundant_clone`], [`wildcard_dependencies`],
@@ -3936,8 +3973,7 @@ Released 2019-01-17
Released 2018-12-06
-[**View 85 PRs merged since 1.30**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2018-10-25..2018-12-06+base%3Amaster+sort%3Amerged-desc+)
-
+[125907ad..2e26fdc2](https://github.com/rust-lang/rust-clippy/compare/125907ad..2e26fdc2)
* Clippy has been relicensed under a dual MIT / Apache license.
See [#3093](https://github.com/rust-lang/rust-clippy/issues/3093) for more
@@ -3977,9 +4013,7 @@ Released 2018-12-06
Released 2018-10-25
-[**View 106 PRs merged since 1.29**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2018-09-13..2018-10-25+base%3Amaster+sort%3Amerged-desc+)
-
-
+[View all 88 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2018-08-02T16%3A54%3A12Z..2018-09-17T09%3A44%3A06Z+base%3Amaster)
* Deprecate `assign_ops` lint
* New lints: [`mistyped_literal_suffixes`], [`ptr_offset_with_cast`],
[`needless_collect`], [`copy_iterator`]
@@ -4646,6 +4680,7 @@ Released 2018-09-13
<!-- lint disable no-unused-definitions -->
<!-- begin autogenerated links to lint list -->
+[`absolute_paths`]: https://rust-lang.github.io/rust-clippy/master/index.html#absolute_paths
[`absurd_extreme_comparisons`]: https://rust-lang.github.io/rust-clippy/master/index.html#absurd_extreme_comparisons
[`alloc_instead_of_core`]: https://rust-lang.github.io/rust-clippy/master/index.html#alloc_instead_of_core
[`allow_attributes`]: https://rust-lang.github.io/rust-clippy/master/index.html#allow_attributes
@@ -4785,6 +4820,7 @@ Released 2018-09-13
[`equatable_if_let`]: https://rust-lang.github.io/rust-clippy/master/index.html#equatable_if_let
[`erasing_op`]: https://rust-lang.github.io/rust-clippy/master/index.html#erasing_op
[`err_expect`]: https://rust-lang.github.io/rust-clippy/master/index.html#err_expect
+[`error_impl_error`]: https://rust-lang.github.io/rust-clippy/master/index.html#error_impl_error
[`eval_order_dependence`]: https://rust-lang.github.io/rust-clippy/master/index.html#eval_order_dependence
[`excessive_nesting`]: https://rust-lang.github.io/rust-clippy/master/index.html#excessive_nesting
[`excessive_precision`]: https://rust-lang.github.io/rust-clippy/master/index.html#excessive_precision
@@ -4808,6 +4844,7 @@ Released 2018-09-13
[`field_reassign_with_default`]: https://rust-lang.github.io/rust-clippy/master/index.html#field_reassign_with_default
[`filetype_is_file`]: https://rust-lang.github.io/rust-clippy/master/index.html#filetype_is_file
[`filter_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#filter_map
+[`filter_map_bool_then`]: https://rust-lang.github.io/rust-clippy/master/index.html#filter_map_bool_then
[`filter_map_identity`]: https://rust-lang.github.io/rust-clippy/master/index.html#filter_map_identity
[`filter_map_next`]: https://rust-lang.github.io/rust-clippy/master/index.html#filter_map_next
[`filter_next`]: https://rust-lang.github.io/rust-clippy/master/index.html#filter_next
@@ -4831,8 +4868,10 @@ Released 2018-09-13
[`forget_copy`]: https://rust-lang.github.io/rust-clippy/master/index.html#forget_copy
[`forget_non_drop`]: https://rust-lang.github.io/rust-clippy/master/index.html#forget_non_drop
[`forget_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#forget_ref
+[`format_collect`]: https://rust-lang.github.io/rust-clippy/master/index.html#format_collect
[`format_in_format_args`]: https://rust-lang.github.io/rust-clippy/master/index.html#format_in_format_args
[`format_push_string`]: https://rust-lang.github.io/rust-clippy/master/index.html#format_push_string
+[`four_forward_slashes`]: https://rust-lang.github.io/rust-clippy/master/index.html#four_forward_slashes
[`from_iter_instead_of_collect`]: https://rust-lang.github.io/rust-clippy/master/index.html#from_iter_instead_of_collect
[`from_over_into`]: https://rust-lang.github.io/rust-clippy/master/index.html#from_over_into
[`from_raw_with_void_ptr`]: https://rust-lang.github.io/rust-clippy/master/index.html#from_raw_with_void_ptr
@@ -4851,16 +4890,19 @@ Released 2018-09-13
[`if_same_then_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#if_same_then_else
[`if_then_some_else_none`]: https://rust-lang.github.io/rust-clippy/master/index.html#if_then_some_else_none
[`ifs_same_cond`]: https://rust-lang.github.io/rust-clippy/master/index.html#ifs_same_cond
+[`ignored_unit_patterns`]: https://rust-lang.github.io/rust-clippy/master/index.html#ignored_unit_patterns
[`impl_trait_in_params`]: https://rust-lang.github.io/rust-clippy/master/index.html#impl_trait_in_params
[`implicit_clone`]: https://rust-lang.github.io/rust-clippy/master/index.html#implicit_clone
[`implicit_hasher`]: https://rust-lang.github.io/rust-clippy/master/index.html#implicit_hasher
[`implicit_return`]: https://rust-lang.github.io/rust-clippy/master/index.html#implicit_return
[`implicit_saturating_add`]: https://rust-lang.github.io/rust-clippy/master/index.html#implicit_saturating_add
[`implicit_saturating_sub`]: https://rust-lang.github.io/rust-clippy/master/index.html#implicit_saturating_sub
+[`impossible_comparisons`]: https://rust-lang.github.io/rust-clippy/master/index.html#impossible_comparisons
[`imprecise_flops`]: https://rust-lang.github.io/rust-clippy/master/index.html#imprecise_flops
[`inconsistent_digit_grouping`]: https://rust-lang.github.io/rust-clippy/master/index.html#inconsistent_digit_grouping
[`inconsistent_struct_constructor`]: https://rust-lang.github.io/rust-clippy/master/index.html#inconsistent_struct_constructor
[`incorrect_clone_impl_on_copy_type`]: https://rust-lang.github.io/rust-clippy/master/index.html#incorrect_clone_impl_on_copy_type
+[`incorrect_partial_ord_impl_on_ord_type`]: https://rust-lang.github.io/rust-clippy/master/index.html#incorrect_partial_ord_impl_on_ord_type
[`index_refutable_slice`]: https://rust-lang.github.io/rust-clippy/master/index.html#index_refutable_slice
[`indexing_slicing`]: https://rust-lang.github.io/rust-clippy/master/index.html#indexing_slicing
[`ineffective_bit_mask`]: https://rust-lang.github.io/rust-clippy/master/index.html#ineffective_bit_mask
@@ -4902,6 +4944,7 @@ Released 2018-09-13
[`iter_on_single_items`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_on_single_items
[`iter_overeager_cloned`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_overeager_cloned
[`iter_skip_next`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_skip_next
+[`iter_skip_zero`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_skip_zero
[`iter_with_drain`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_with_drain
[`iterator_step_by_zero`]: https://rust-lang.github.io/rust-clippy/master/index.html#iterator_step_by_zero
[`just_underscores_and_digits`]: https://rust-lang.github.io/rust-clippy/master/index.html#just_underscores_and_digits
@@ -4941,6 +4984,8 @@ Released 2018-09-13
[`manual_flatten`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_flatten
[`manual_instant_elapsed`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_instant_elapsed
[`manual_is_ascii_check`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_ascii_check
+[`manual_is_finite`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_finite
+[`manual_is_infinite`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_infinite
[`manual_let_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_let_else
[`manual_main_separator_str`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_main_separator_str
[`manual_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_map
@@ -5047,6 +5092,7 @@ Released 2018-09-13
[`needless_option_as_deref`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_option_as_deref
[`needless_option_take`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_option_take
[`needless_parens_on_range_literals`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_parens_on_range_literals
+[`needless_pass_by_ref_mut`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_pass_by_ref_mut
[`needless_pass_by_value`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_pass_by_value
[`needless_pub_self`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_pub_self
[`needless_question_mark`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_question_mark
@@ -5054,6 +5100,7 @@ Released 2018-09-13
[`needless_raw_string_hashes`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_raw_string_hashes
[`needless_raw_strings`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_raw_strings
[`needless_return`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_return
+[`needless_return_with_question_mark`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_return_with_question_mark
[`needless_splitn`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_splitn
[`needless_update`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_update
[`neg_cmp_op_on_partial_ord`]: https://rust-lang.github.io/rust-clippy/master/index.html#neg_cmp_op_on_partial_ord
@@ -5134,7 +5181,9 @@ Released 2018-09-13
[`rc_buffer`]: https://rust-lang.github.io/rust-clippy/master/index.html#rc_buffer
[`rc_clone_in_vec_init`]: https://rust-lang.github.io/rust-clippy/master/index.html#rc_clone_in_vec_init
[`rc_mutex`]: https://rust-lang.github.io/rust-clippy/master/index.html#rc_mutex
+[`read_line_without_trim`]: https://rust-lang.github.io/rust-clippy/master/index.html#read_line_without_trim
[`read_zero_byte_vec`]: https://rust-lang.github.io/rust-clippy/master/index.html#read_zero_byte_vec
+[`readonly_write_lock`]: https://rust-lang.github.io/rust-clippy/master/index.html#readonly_write_lock
[`recursive_format_impl`]: https://rust-lang.github.io/rust-clippy/master/index.html#recursive_format_impl
[`redundant_allocation`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_allocation
[`redundant_async_block`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_async_block
@@ -5143,9 +5192,12 @@ Released 2018-09-13
[`redundant_closure`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure
[`redundant_closure_call`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_call
[`redundant_closure_for_method_calls`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
+[`redundant_comparisons`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_comparisons
[`redundant_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_else
[`redundant_feature_names`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_feature_names
[`redundant_field_names`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_field_names
+[`redundant_guards`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_guards
+[`redundant_locals`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_locals
[`redundant_pattern`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_pattern
[`redundant_pattern_matching`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_pattern_matching
[`redundant_pub_crate`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_pub_crate
@@ -5215,6 +5267,7 @@ Released 2018-09-13
[`string_extend_chars`]: https://rust-lang.github.io/rust-clippy/master/index.html#string_extend_chars
[`string_from_utf8_as_bytes`]: https://rust-lang.github.io/rust-clippy/master/index.html#string_from_utf8_as_bytes
[`string_lit_as_bytes`]: https://rust-lang.github.io/rust-clippy/master/index.html#string_lit_as_bytes
+[`string_lit_chars_any`]: https://rust-lang.github.io/rust-clippy/master/index.html#string_lit_chars_any
[`string_slice`]: https://rust-lang.github.io/rust-clippy/master/index.html#string_slice
[`string_to_string`]: https://rust-lang.github.io/rust-clippy/master/index.html#string_to_string
[`strlen_on_c_strings`]: https://rust-lang.github.io/rust-clippy/master/index.html#strlen_on_c_strings
@@ -5266,6 +5319,7 @@ Released 2018-09-13
[`try_err`]: https://rust-lang.github.io/rust-clippy/master/index.html#try_err
[`tuple_array_conversions`]: https://rust-lang.github.io/rust-clippy/master/index.html#tuple_array_conversions
[`type_complexity`]: https://rust-lang.github.io/rust-clippy/master/index.html#type_complexity
+[`type_id_on_box`]: https://rust-lang.github.io/rust-clippy/master/index.html#type_id_on_box
[`type_repetition_in_bounds`]: https://rust-lang.github.io/rust-clippy/master/index.html#type_repetition_in_bounds
[`unchecked_duration_subtraction`]: https://rust-lang.github.io/rust-clippy/master/index.html#unchecked_duration_subtraction
[`undocumented_unsafe_blocks`]: https://rust-lang.github.io/rust-clippy/master/index.html#undocumented_unsafe_blocks
@@ -5322,6 +5376,7 @@ Released 2018-09-13
[`unused_unit`]: https://rust-lang.github.io/rust-clippy/master/index.html#unused_unit
[`unusual_byte_groupings`]: https://rust-lang.github.io/rust-clippy/master/index.html#unusual_byte_groupings
[`unwrap_in_result`]: https://rust-lang.github.io/rust-clippy/master/index.html#unwrap_in_result
+[`unwrap_or_default`]: https://rust-lang.github.io/rust-clippy/master/index.html#unwrap_or_default
[`unwrap_or_else_default`]: https://rust-lang.github.io/rust-clippy/master/index.html#unwrap_or_else_default
[`unwrap_used`]: https://rust-lang.github.io/rust-clippy/master/index.html#unwrap_used
[`upper_case_acronyms`]: https://rust-lang.github.io/rust-clippy/master/index.html#upper_case_acronyms
@@ -5422,4 +5477,6 @@ Released 2018-09-13
[`accept-comment-above-statement`]: https://doc.rust-lang.org/clippy/lint_configuration.html#accept-comment-above-statement
[`accept-comment-above-attributes`]: https://doc.rust-lang.org/clippy/lint_configuration.html#accept-comment-above-attributes
[`allow-one-hash-in-raw-strings`]: https://doc.rust-lang.org/clippy/lint_configuration.html#allow-one-hash-in-raw-strings
+[`absolute-paths-max-segments`]: https://doc.rust-lang.org/clippy/lint_configuration.html#absolute-paths-max-segments
+[`absolute-paths-allowed-crates`]: https://doc.rust-lang.org/clippy/lint_configuration.html#absolute-paths-allowed-crates
<!-- end autogenerated links to configuration documentation -->
diff --git a/src/tools/clippy/Cargo.toml b/src/tools/clippy/Cargo.toml
index 76c804f93..0fb3a3a98 100644
--- a/src/tools/clippy/Cargo.toml
+++ b/src/tools/clippy/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "clippy"
-version = "0.1.72"
+version = "0.1.73"
description = "A bunch of helpful lints to avoid common pitfalls in Rust"
repository = "https://github.com/rust-lang/rust-clippy"
readme = "README.md"
@@ -36,6 +36,17 @@ walkdir = "2.3"
filetime = "0.2"
itertools = "0.10.1"
+# UI test dependencies
+clippy_utils = { path = "clippy_utils" }
+derive-new = "0.5"
+if_chain = "1.0"
+quote = "1.0"
+serde = { version = "1.0.125", features = ["derive"] }
+syn = { version = "2.0", features = ["full"] }
+futures = "0.3"
+parking_lot = "0.12"
+tokio = { version = "1", features = ["io-util"] }
+
[build-dependencies]
rustc_tools_util = "0.3.0"
diff --git a/src/tools/clippy/README.md b/src/tools/clippy/README.md
index d712d3e67..5d490645d 100644
--- a/src/tools/clippy/README.md
+++ b/src/tools/clippy/README.md
@@ -5,7 +5,7 @@
A collection of lints to catch common mistakes and improve your [Rust](https://github.com/rust-lang/rust) code.
-[There are over 600 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html)
+[There are over 650 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html)
Lints are divided into categories, each with a default [lint level](https://doc.rust-lang.org/rustc/lints/levels.html).
You can choose how much Clippy is supposed to ~~annoy~~ help you by changing the lint level by category.
diff --git a/src/tools/clippy/book/src/README.md b/src/tools/clippy/book/src/README.md
index 3b6270962..486ea3df7 100644
--- a/src/tools/clippy/book/src/README.md
+++ b/src/tools/clippy/book/src/README.md
@@ -6,7 +6,7 @@
A collection of lints to catch common mistakes and improve your
[Rust](https://github.com/rust-lang/rust) code.
-[There are over 600 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html)
+[There are over 650 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html)
Lints are divided into categories, each with a default [lint
level](https://doc.rust-lang.org/rustc/lints/levels.html). You can choose how
diff --git a/src/tools/clippy/book/src/development/infrastructure/changelog_update.md b/src/tools/clippy/book/src/development/infrastructure/changelog_update.md
index 524454944..df9b1bbe1 100644
--- a/src/tools/clippy/book/src/development/infrastructure/changelog_update.md
+++ b/src/tools/clippy/book/src/development/infrastructure/changelog_update.md
@@ -56,28 +56,6 @@ and open that file in your editor of choice.
When updating the changelog it's also a good idea to make sure that `commit1` is
already correct in the current changelog.
-#### PR ranges
-
-We developed the concept of PR ranges to help the user understand the size of a new update. To create a PR range,
-get the current release date and the date that the last version was released (YYYY-MM-DD) and use the following link:
-
-```
-[**View <NUMBER OF PRs> PRs merged since 1.<LAST VERSION NUM>**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A<LAST VERSION DATE>..<CURRENT VERSION DATE>+base%3Amaster+sort%3Amerged-desc+)
-```
-
-> Note: Be sure to check click the link and check how many PRs got merged between
-
-Example:
-
-```
-[**View 85 PRs merged since 1.69**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2023-04-20..2023-06-01+base%3Amaster+sort%3Amerged-desc+)
-```
-
-Which renders to:
-[**View 85 PRs merged since 1.69**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2023-04-20..2023-06-01+base%3Amaster+sort%3Amerged-desc+)
-
-Note that **commit ranges should not be included**, only PR ranges.
-
### 3. Authoring the final changelog
The above script should have dumped all the relevant PRs to the file you
diff --git a/src/tools/clippy/book/src/development/speedtest.md b/src/tools/clippy/book/src/development/speedtest.md
new file mode 100644
index 000000000..0db718e6a
--- /dev/null
+++ b/src/tools/clippy/book/src/development/speedtest.md
@@ -0,0 +1,24 @@
+# Speedtest
+`SPEEDTEST` is the tool we use to measure a lint's performance; it works by executing the same test several times.
+
+It's useful for measuring changes to current lints and deciding whether a change regresses performance too much.
+`SPEEDTEST` is controlled via the `SPEEDTEST` (and `SPEEDTEST_*`) environment variables.
+
+## Checking Speedtest
+
+To do a simple speed test of a lint (e.g. `allow_attributes`), use this command.
+
+```sh
+$ SPEEDTEST=ui TESTNAME="allow_attributes" cargo uitest -- --nocapture
+```
+
+This will test all `ui` tests (`SPEEDTEST=ui`) whose names start with `allow_attributes`. By default, `SPEEDTEST` will
+iterate your test 1000 times. But you can change this with `SPEEDTEST_ITERATIONS`.
+
+```sh
+$ SPEEDTEST=toml SPEEDTEST_ITERATIONS=100 TESTNAME="semicolon_block" cargo uitest -- --nocapture
+```
+
+> **WARNING**: Be sure to pass `-- --nocapture` at the end of the command to see the average test time. If you don't
+> use `-- --nocapture` (e.g. `SPEEDTEST=ui TESTNAME="let_underscore_untyped" cargo uitest`), the timing output
+> will not show up.
diff --git a/src/tools/clippy/book/src/lint_configuration.md b/src/tools/clippy/book/src/lint_configuration.md
index 60d7ce6e6..caaad6d11 100644
--- a/src/tools/clippy/book/src/lint_configuration.md
+++ b/src/tools/clippy/book/src/lint_configuration.md
@@ -175,7 +175,7 @@ The maximum amount of nesting a block can reside in
## `disallowed-names`
The list of disallowed names to lint about. NB: `bar` is not here since it has legitimate uses. The value
-`".."` can be used as part of the list to indicate, that the configured values should be appended to the
+`".."` can be used as part of the list to indicate that the configured values should be appended to the
default configuration of Clippy. By default, any configuration will replace the default value.
**Default Value:** `["foo", "baz", "quux"]` (`Vec<String>`)
@@ -730,3 +730,24 @@ Whether to allow `r#""#` when `r""` can be used
* [`unnecessary_raw_string_hashes`](https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_raw_string_hashes)
+## `absolute-paths-max-segments`
+The maximum number of segments a path can have before being linted; anything above this number
+will be linted.
+
+**Default Value:** `2` (`u64`)
+
+---
+**Affected lints:**
+* [`absolute_paths`](https://rust-lang.github.io/rust-clippy/master/index.html#absolute_paths)
+
+
+## `absolute-paths-allowed-crates`
+Which crates to allow absolute paths from
+
+**Default Value:** `{}` (`rustc_data_structures::fx::FxHashSet<String>`)
+
+---
+**Affected lints:**
+* [`absolute_paths`](https://rust-lang.github.io/rust-clippy/master/index.html#absolute_paths)
+
+
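As a rough, hypothetical illustration of the options documented above (the values below are made up, not Clippy's defaults), a `clippy.toml` combining the `".."` appending behaviour of `disallowed-names` with the two new `absolute_paths` options might look like this:

```toml
# Hypothetical clippy.toml sketch; the values are illustrative, not defaults.
# `".."` keeps Clippy's default disallowed names and appends `wibble` to them.
disallowed-names = ["..", "wibble"]
# Lint absolute paths only when they have more than three segments, and never
# lint absolute paths that start with `crate`.
absolute-paths-max-segments = 3
absolute-paths-allowed-crates = ["crate"]
```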
diff --git a/src/tools/clippy/clippy_dev/src/lib.rs b/src/tools/clippy/clippy_dev/src/lib.rs
index 4624451cf..c4ae4f0e2 100644
--- a/src/tools/clippy/clippy_dev/src/lib.rs
+++ b/src/tools/clippy/clippy_dev/src/lib.rs
@@ -51,7 +51,7 @@ pub fn clippy_project_root() -> PathBuf {
for path in current_dir.ancestors() {
let result = std::fs::read_to_string(path.join("Cargo.toml"));
if let Err(err) = &result {
- if err.kind() == std::io::ErrorKind::NotFound {
+ if err.kind() == io::ErrorKind::NotFound {
continue;
}
}
diff --git a/src/tools/clippy/clippy_dev/src/main.rs b/src/tools/clippy/clippy_dev/src/main.rs
index 43eaccdf5..fca750faf 100644
--- a/src/tools/clippy/clippy_dev/src/main.rs
+++ b/src/tools/clippy/clippy_dev/src/main.rs
@@ -41,7 +41,7 @@ fn main() {
matches.get_one::<String>("type").map(String::as_str),
matches.get_flag("msrv"),
) {
- Ok(_) => update_lints::update(update_lints::UpdateMode::Change),
+ Ok(()) => update_lints::update(update_lints::UpdateMode::Change),
Err(e) => eprintln!("Unable to create lint: {e}"),
}
},
diff --git a/src/tools/clippy/clippy_dev/src/new_lint.rs b/src/tools/clippy/clippy_dev/src/new_lint.rs
index f11aa547b..e64cf2c87 100644
--- a/src/tools/clippy/clippy_dev/src/new_lint.rs
+++ b/src/tools/clippy/clippy_dev/src/new_lint.rs
@@ -96,8 +96,7 @@ fn create_test(lint: &LintData<'_>) -> io::Result<()> {
path.push("src");
fs::create_dir(&path)?;
- let header = format!("//@compile-flags: --crate-name={lint_name}");
- write_file(path.join("main.rs"), get_test_file_contents(lint_name, Some(&header)))?;
+ write_file(path.join("main.rs"), get_test_file_contents(lint_name))?;
Ok(())
}
@@ -113,7 +112,7 @@ fn create_test(lint: &LintData<'_>) -> io::Result<()> {
println!("Generated test directories: `{relative_test_dir}/pass`, `{relative_test_dir}/fail`");
} else {
let test_path = format!("tests/ui/{}.rs", lint.name);
- let test_contents = get_test_file_contents(lint.name, None);
+ let test_contents = get_test_file_contents(lint.name);
write_file(lint.project_root.join(&test_path), test_contents)?;
println!("Generated test file: `{test_path}`");
@@ -195,23 +194,16 @@ pub(crate) fn get_stabilization_version() -> String {
parse_manifest(&contents).expect("Unable to find package version in `Cargo.toml`")
}
-fn get_test_file_contents(lint_name: &str, header_commands: Option<&str>) -> String {
- let mut contents = formatdoc!(
+fn get_test_file_contents(lint_name: &str) -> String {
+ formatdoc!(
r#"
- #![allow(unused)]
#![warn(clippy::{lint_name})]
fn main() {{
// test code goes here
}}
"#
- );
-
- if let Some(header) = header_commands {
- contents = format!("{header}\n{contents}");
- }
-
- contents
+ )
}
fn get_manifest_contents(lint_name: &str, hint: &str) -> String {
@@ -358,6 +350,10 @@ fn create_lint_for_ty(lint: &LintData<'_>, enable_msrv: bool, ty: &str) -> io::R
let mod_file_path = ty_dir.join("mod.rs");
let context_import = setup_mod_file(&mod_file_path, lint)?;
+ let pass_lifetimes = match context_import {
+ "LateContext" => "<'_>",
+ _ => "",
+ };
let name_upper = lint.name.to_uppercase();
let mut lint_file_contents = String::new();
@@ -372,7 +368,7 @@ fn create_lint_for_ty(lint: &LintData<'_>, enable_msrv: bool, ty: &str) -> io::R
use super::{name_upper};
// TODO: Adjust the parameters as necessary
- pub(super) fn check(cx: &{context_import}, msrv: &Msrv) {{
+ pub(super) fn check(cx: &{context_import}{pass_lifetimes}, msrv: &Msrv) {{
if !msrv.meets(todo!("Add a new entry in `clippy_utils/src/msrvs`")) {{
return;
}}
@@ -389,7 +385,7 @@ fn create_lint_for_ty(lint: &LintData<'_>, enable_msrv: bool, ty: &str) -> io::R
use super::{name_upper};
// TODO: Adjust the parameters as necessary
- pub(super) fn check(cx: &{context_import}) {{
+ pub(super) fn check(cx: &{context_import}{pass_lifetimes}) {{
todo!();
}}
"#
diff --git a/src/tools/clippy/clippy_dev/src/setup/intellij.rs b/src/tools/clippy/clippy_dev/src/setup/intellij.rs
index efdb158c2..a7138f36a 100644
--- a/src/tools/clippy/clippy_dev/src/setup/intellij.rs
+++ b/src/tools/clippy/clippy_dev/src/setup/intellij.rs
@@ -37,7 +37,7 @@ impl ClippyProjectInfo {
pub fn setup_rustc_src(rustc_path: &str) {
let Ok(rustc_source_dir) = check_and_get_rustc_dir(rustc_path) else {
- return
+ return;
};
for project in CLIPPY_PROJECTS {
diff --git a/src/tools/clippy/clippy_dev/src/setup/vscode.rs b/src/tools/clippy/clippy_dev/src/setup/vscode.rs
index dbcdc9b59..204f4af2c 100644
--- a/src/tools/clippy/clippy_dev/src/setup/vscode.rs
+++ b/src/tools/clippy/clippy_dev/src/setup/vscode.rs
@@ -47,7 +47,7 @@ fn check_install_precondition(force_override: bool) -> bool {
}
} else {
match fs::create_dir(vs_dir_path) {
- Ok(_) => {
+ Ok(()) => {
println!("info: created `{VSCODE_DIR}` directory for clippy");
},
Err(err) => {
diff --git a/src/tools/clippy/clippy_dev/src/update_lints.rs b/src/tools/clippy/clippy_dev/src/update_lints.rs
index 7213c9dfe..7c2e06ea6 100644
--- a/src/tools/clippy/clippy_dev/src/update_lints.rs
+++ b/src/tools/clippy/clippy_dev/src/update_lints.rs
@@ -340,7 +340,10 @@ pub fn deprecate(name: &str, reason: Option<&String>) {
let name_upper = name.to_uppercase();
let (mut lints, deprecated_lints, renamed_lints) = gather_all();
- let Some(lint) = lints.iter().find(|l| l.name == name_lower) else { eprintln!("error: failed to find lint `{name}`"); return; };
+ let Some(lint) = lints.iter().find(|l| l.name == name_lower) else {
+ eprintln!("error: failed to find lint `{name}`");
+ return;
+ };
let mod_path = {
let mut mod_path = PathBuf::from(format!("clippy_lints/src/{}", lint.module));
diff --git a/src/tools/clippy/clippy_lints/Cargo.toml b/src/tools/clippy/clippy_lints/Cargo.toml
index c23054443..11136867f 100644
--- a/src/tools/clippy/clippy_lints/Cargo.toml
+++ b/src/tools/clippy/clippy_lints/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "clippy_lints"
-version = "0.1.72"
+version = "0.1.73"
description = "A bunch of helpful lints to avoid common pitfalls in Rust"
repository = "https://github.com/rust-lang/rust-clippy"
readme = "README.md"
diff --git a/src/tools/clippy/clippy_lints/src/absolute_paths.rs b/src/tools/clippy/clippy_lints/src/absolute_paths.rs
new file mode 100644
index 000000000..04417c4c4
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/absolute_paths.rs
@@ -0,0 +1,100 @@
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::source::snippet_opt;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::{DefId, CRATE_DEF_INDEX};
+use rustc_hir::{HirId, ItemKind, Node, Path};
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::symbol::kw;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of items through absolute paths, like `std::env::current_dir`.
+ ///
+ /// ### Why is this bad?
+ /// Many codebases have their own style when it comes to importing, but one that is seldom used
+ /// is using absolute paths *everywhere*. This is generally considered unidiomatic, and you
+ /// should add a `use` statement.
+ ///
+ /// The default maximum number of segments (2) is pretty strict; you may want to increase this in
+ /// `clippy.toml`.
+ ///
+ /// Note: One exception to this is code from macro expansion; this lint does not check such cases, as
+ /// using absolute paths is the proper way of referencing items in a macro.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let x = std::f64::consts::PI;
+ /// ```
+ /// Use any of the below instead, or anything else:
+ /// ```rust
+ /// use std::f64;
+ /// use std::f64::consts;
+ /// use std::f64::consts::PI;
+ /// let x = f64::consts::PI;
+ /// let x = consts::PI;
+ /// let x = PI;
+ /// use std::f64::consts as f64_consts;
+ /// let x = f64_consts::PI;
+ /// ```
+ #[clippy::version = "1.73.0"]
+ pub ABSOLUTE_PATHS,
+ restriction,
+ "checks for usage of an item without a `use` statement"
+}
+impl_lint_pass!(AbsolutePaths => [ABSOLUTE_PATHS]);
+
+pub struct AbsolutePaths {
+ pub absolute_paths_max_segments: u64,
+ pub absolute_paths_allowed_crates: FxHashSet<String>,
+}
+
+impl LateLintPass<'_> for AbsolutePaths {
+ // We should only lint `QPath::Resolved`s, but since `Path` is only used in `Resolved` and `UsePath`
+ // we don't need to use a visitor or anything as we can just check if the `Node` for `hir_id` isn't
+ // a `Use`
+ #[expect(clippy::cast_possible_truncation)]
+ fn check_path(&mut self, cx: &LateContext<'_>, path: &Path<'_>, hir_id: HirId) {
+ let Self {
+ absolute_paths_max_segments,
+ absolute_paths_allowed_crates,
+ } = self;
+
+ if !path.span.from_expansion()
+ && let Some(node) = cx.tcx.hir().find(hir_id)
+ && !matches!(node, Node::Item(item) if matches!(item.kind, ItemKind::Use(_, _)))
+ && let [first, rest @ ..] = path.segments
+ // Handle `::std`
+ && let (segment, len) = if first.ident.name == kw::PathRoot {
+ // Indexing is fine as `PathRoot` must be followed by another segment. `len() - 1`
+ // is fine here for the same reason
+ (&rest[0], path.segments.len() - 1)
+ } else {
+ (first, path.segments.len())
+ }
+ && len > *absolute_paths_max_segments as usize
+ && let Some(segment_snippet) = snippet_opt(cx, segment.ident.span)
+ && segment_snippet == segment.ident.as_str()
+ {
+ let is_abs_external =
+ matches!(segment.res, Res::Def(DefKind::Mod, DefId { index, .. }) if index == CRATE_DEF_INDEX);
+ let is_abs_crate = segment.ident.name == kw::Crate;
+
+ if is_abs_external && absolute_paths_allowed_crates.contains(segment.ident.name.as_str())
+ || is_abs_crate && absolute_paths_allowed_crates.contains("crate")
+ {
+ return;
+ }
+
+ if is_abs_external || is_abs_crate {
+ span_lint(
+ cx,
+ ABSOLUTE_PATHS,
+ path.span,
+ "consider bringing this path into scope with the `use` keyword",
+ );
+ }
+ }
+ }
+}
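For orientation, here is a minimal sketch of what the new `absolute_paths` lint reacts to under the default `absolute-paths-max-segments = 2`; it is an illustrative example, not one of the lint's UI tests:

```rust
// Illustrative sketch, not from the test suite. `absolute_paths` is a
// restriction lint, so it must be enabled explicitly.
#![warn(clippy::absolute_paths)]

fn main() {
    let _ = std::f64::consts::PI; // four segments (> 2): linted

    use std::f64::consts;
    let _ = consts::PI; // two segments: not linted
}
```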
diff --git a/src/tools/clippy/clippy_lints/src/allow_attributes.rs b/src/tools/clippy/clippy_lints/src/allow_attributes.rs
index eb2118471..e1ef514ed 100644
--- a/src/tools/clippy/clippy_lints/src/allow_attributes.rs
+++ b/src/tools/clippy/clippy_lints/src/allow_attributes.rs
@@ -1,5 +1,6 @@
use ast::{AttrStyle, Attribute};
-use clippy_utils::{diagnostics::span_lint_and_sugg, is_from_proc_macro};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_from_proc_macro;
use rustc_ast as ast;
use rustc_errors::Applicability;
use rustc_lint::{LateContext, LateLintPass, LintContext};
diff --git a/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs b/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs
index 98ee8a9a8..35a04b5e4 100644
--- a/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs
+++ b/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs
@@ -1,9 +1,8 @@
use clippy_utils::diagnostics::span_lint_and_then;
-use clippy_utils::last_path_segment;
use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
+use clippy_utils::{is_from_proc_macro, last_path_segment};
use rustc_hir::{Expr, ExprKind};
-use rustc_lint::LateContext;
-use rustc_lint::LateLintPass;
+use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
use rustc_middle::ty::print::with_forced_trimmed_paths;
use rustc_middle::ty::GenericArgKind;
@@ -39,10 +38,11 @@ declare_clippy_lint! {
}
declare_lint_pass!(ArcWithNonSendSync => [ARC_WITH_NON_SEND_SYNC]);
-impl LateLintPass<'_> for ArcWithNonSendSync {
- fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
- let ty = cx.typeck_results().expr_ty(expr);
- if is_type_diagnostic_item(cx, ty, sym::Arc)
+impl<'tcx> LateLintPass<'tcx> for ArcWithNonSendSync {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ if !expr.span.from_expansion()
+ && let ty = cx.typeck_results().expr_ty(expr)
+ && is_type_diagnostic_item(cx, ty, sym::Arc)
&& let ExprKind::Call(func, [arg]) = expr.kind
&& let ExprKind::Path(func_path) = func.kind
&& last_path_segment(&func_path).ident.name == sym::new
@@ -55,6 +55,7 @@ impl LateLintPass<'_> for ArcWithNonSendSync {
&& let Some(sync) = cx.tcx.lang_items().sync_trait()
&& let [is_send, is_sync] = [send, sync].map(|id| implements_trait(cx, arg_ty, id, &[]))
&& !(is_send && is_sync)
+ && !is_from_proc_macro(cx, expr)
{
span_lint_and_then(
cx,
diff --git a/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs b/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs
index a8dc0cb3b..b90914e93 100644
--- a/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs
+++ b/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs
@@ -31,14 +31,20 @@ declare_lint_pass!(AssertionsOnConstants => [ASSERTIONS_ON_CONSTANTS]);
impl<'tcx> LateLintPass<'tcx> for AssertionsOnConstants {
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
- let Some(macro_call) = root_macro_call_first_node(cx, e) else { return };
+ let Some(macro_call) = root_macro_call_first_node(cx, e) else {
+ return;
+ };
let is_debug = match cx.tcx.get_diagnostic_name(macro_call.def_id) {
Some(sym::debug_assert_macro) => true,
Some(sym::assert_macro) => false,
_ => return,
};
- let Some((condition, panic_expn)) = find_assert_args(cx, e, macro_call.expn) else { return };
- let Some(Constant::Bool(val)) = constant(cx, cx.typeck_results(), condition) else { return };
+ let Some((condition, panic_expn)) = find_assert_args(cx, e, macro_call.expn) else {
+ return;
+ };
+ let Some(Constant::Bool(val)) = constant(cx, cx.typeck_results(), condition) else {
+ return;
+ };
if val {
span_lint_and_help(
cx,
diff --git a/src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs b/src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs
index f6d6c23bb..2980c9d6d 100644
--- a/src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs
+++ b/src/tools/clippy/clippy_lints/src/assertions_on_result_states.rs
@@ -47,7 +47,7 @@ impl<'tcx> LateLintPass<'tcx> for AssertionsOnResultStates {
&& let result_type_with_refs = cx.typeck_results().expr_ty(recv)
&& let result_type = result_type_with_refs.peel_refs()
&& is_type_diagnostic_item(cx, result_type, sym::Result)
- && let ty::Adt(_, substs) = result_type.kind()
+ && let ty::Adt(_, args) = result_type.kind()
{
if !is_copy(cx, result_type) {
if result_type_with_refs != result_type {
@@ -61,7 +61,7 @@ impl<'tcx> LateLintPass<'tcx> for AssertionsOnResultStates {
let semicolon = if is_expr_final_block_expr(cx.tcx, e) {";"} else {""};
let mut app = Applicability::MachineApplicable;
match method_segment.ident.as_str() {
- "is_ok" if type_suitable_to_unwrap(cx, substs.type_at(1)) => {
+ "is_ok" if type_suitable_to_unwrap(cx, args.type_at(1)) => {
span_lint_and_sugg(
cx,
ASSERTIONS_ON_RESULT_STATES,
@@ -75,7 +75,7 @@ impl<'tcx> LateLintPass<'tcx> for AssertionsOnResultStates {
app,
);
}
- "is_err" if type_suitable_to_unwrap(cx, substs.type_at(0)) => {
+ "is_err" if type_suitable_to_unwrap(cx, args.type_at(0)) => {
span_lint_and_sugg(
cx,
ASSERTIONS_ON_RESULT_STATES,
diff --git a/src/tools/clippy/clippy_lints/src/attrs.rs b/src/tools/clippy/clippy_lints/src/attrs.rs
index 2ba78f995..2a5be2756 100644
--- a/src/tools/clippy/clippy_lints/src/attrs.rs
+++ b/src/tools/clippy/clippy_lints/src/attrs.rs
@@ -1,12 +1,10 @@
//! checks for attributes
+use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::is_from_proc_macro;
use clippy_utils::macros::{is_panic, macro_backtrace};
use clippy_utils::msrvs::{self, Msrv};
use clippy_utils::source::{first_line_of_span, is_present_in_source, snippet_opt, without_block_comments};
-use clippy_utils::{
- diagnostics::{span_lint, span_lint_and_help, span_lint_and_sugg, span_lint_and_then},
- is_from_proc_macro,
-};
use if_chain::if_chain;
use rustc_ast::{AttrKind, AttrStyle, Attribute, LitKind, MetaItemKind, MetaItemLit, NestedMetaItem};
use rustc_errors::Applicability;
diff --git a/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs b/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs
index 9c0532474..1593d7b0f 100644
--- a/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs
+++ b/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs
@@ -1,9 +1,8 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
-use clippy_utils::get_parent_expr;
-use clippy_utils::higher;
use clippy_utils::source::snippet_block_with_applicability;
use clippy_utils::ty::implements_trait;
use clippy_utils::visitors::{for_each_expr, Descend};
+use clippy_utils::{get_parent_expr, higher};
use core::ops::ControlFlow;
use if_chain::if_chain;
use rustc_errors::Applicability;
@@ -85,8 +84,7 @@ impl<'tcx> LateLintPass<'tcx> for BlocksInIfConditions {
);
}
} else {
- let span =
- block.expr.as_ref().map_or_else(|| block.stmts[0].span, |e| e.span);
+ let span = block.expr.as_ref().map_or_else(|| block.stmts[0].span, |e| e.span);
if span.from_expansion() || expr.span.from_expansion() {
return;
}
diff --git a/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs b/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs
index e8775b081..450359771 100644
--- a/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs
+++ b/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs
@@ -61,7 +61,7 @@ fn is_impl_not_trait_with_bool_out<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -
)
})
.map_or(false, |assoc_item| {
- let proj = Ty::new_projection(cx.tcx,assoc_item.def_id, cx.tcx.mk_substs_trait(ty, []));
+ let proj = Ty::new_projection(cx.tcx, assoc_item.def_id, cx.tcx.mk_args_trait(ty, []));
let nty = cx.tcx.normalize_erasing_regions(cx.param_env, proj);
nty.is_bool()
@@ -70,14 +70,18 @@ fn is_impl_not_trait_with_bool_out<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -
impl<'tcx> LateLintPass<'tcx> for BoolAssertComparison {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return };
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else {
+ return;
+ };
let macro_name = cx.tcx.item_name(macro_call.def_id);
let eq_macro = match macro_name.as_str() {
"assert_eq" | "debug_assert_eq" => true,
"assert_ne" | "debug_assert_ne" => false,
_ => return,
};
- let Some ((a, b, _)) = find_assert_eq_args(cx, expr, macro_call.expn) else { return };
+ let Some((a, b, _)) = find_assert_eq_args(cx, expr, macro_call.expn) else {
+ return;
+ };
let a_span = a.span.source_callsite();
let b_span = b.span.source_callsite();
@@ -126,7 +130,9 @@ impl<'tcx> LateLintPass<'tcx> for BoolAssertComparison {
let mut suggestions = vec![(name_span, non_eq_mac.to_string()), (lit_span, String::new())];
if bool_value ^ eq_macro {
- let Some(sugg) = Sugg::hir_opt(cx, non_lit_expr) else { return };
+ let Some(sugg) = Sugg::hir_opt(cx, non_lit_expr) else {
+ return;
+ };
suggestions.push((non_lit_expr.span, (!sugg).to_string()));
}
diff --git a/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs b/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs
index bdb3a0116..1828dd651 100644
--- a/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs
+++ b/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs
@@ -4,7 +4,9 @@ use rustc_hir::{Block, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
-use clippy_utils::{diagnostics::span_lint_and_then, in_constant, is_else_clause, is_integer_literal, sugg::Sugg};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::sugg::Sugg;
+use clippy_utils::{in_constant, is_else_clause, is_integer_literal};
use rustc_errors::Applicability;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs b/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs
index 814108ed8..b3dbbb08f 100644
--- a/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs
@@ -1,9 +1,8 @@
use crate::reference::DEREF_ADDROF;
use clippy_utils::diagnostics::span_lint_and_then;
-use clippy_utils::is_from_proc_macro;
use clippy_utils::source::snippet_opt;
use clippy_utils::ty::implements_trait;
-use clippy_utils::{get_parent_expr, is_lint_allowed};
+use clippy_utils::{get_parent_expr, is_from_proc_macro, is_lint_allowed};
use rustc_errors::Applicability;
use rustc_hir::{ExprKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
diff --git a/src/tools/clippy/clippy_lints/src/box_default.rs b/src/tools/clippy/clippy_lints/src/box_default.rs
index e42c3fe24..fa9c525fc 100644
--- a/src/tools/clippy/clippy_lints/src/box_default.rs
+++ b/src/tools/clippy/clippy_lints/src/box_default.rs
@@ -1,12 +1,10 @@
-use clippy_utils::{
- diagnostics::span_lint_and_sugg, get_parent_node, is_default_equivalent, macros::macro_backtrace, match_path,
- path_def_id, paths, ty::expr_sig,
-};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::macros::macro_backtrace;
+use clippy_utils::ty::expr_sig;
+use clippy_utils::{get_parent_node, is_default_equivalent, match_path, path_def_id, paths};
use rustc_errors::Applicability;
-use rustc_hir::{
- intravisit::{walk_ty, Visitor},
- Block, Expr, ExprKind, Local, Node, QPath, TyKind,
-};
+use rustc_hir::intravisit::{walk_ty, Visitor};
+use rustc_hir::{Block, Expr, ExprKind, Local, Node, QPath, TyKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::print::with_forced_trimmed_paths;
diff --git a/src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs b/src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs
index 1633ffd58..1e56ed5f4 100644
--- a/src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs
@@ -3,10 +3,8 @@ use clippy_utils::source::snippet_opt;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
-use rustc_middle::{
- mir::Mutability,
- ty::{self, Ty, TypeAndMut},
-};
+use rustc_middle::mir::Mutability;
+use rustc_middle::ty::{self, Ty, TypeAndMut};
use super::AS_PTR_CAST_MUT;
@@ -17,7 +15,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>,
&& let ExprKind::MethodCall(method_name, receiver, [], _) = cast_expr.peel_blocks().kind
&& method_name.ident.name == rustc_span::sym::as_ptr
&& let Some(as_ptr_did) = cx.typeck_results().type_dependent_def_id(cast_expr.peel_blocks().hir_id)
- && let as_ptr_sig = cx.tcx.fn_sig(as_ptr_did).subst_identity()
+ && let as_ptr_sig = cx.tcx.fn_sig(as_ptr_did).instantiate_identity()
&& let Some(first_param_ty) = as_ptr_sig.skip_binder().inputs().iter().next()
&& let ty::Ref(_, _, Mutability::Not) = first_param_ty.kind()
&& let Some(recv) = snippet_opt(cx, receiver.span)
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs b/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs
index 6c8ee296c..5bf467efa 100644
--- a/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs
@@ -66,7 +66,7 @@ fn is_used_as_unaligned(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
if matches!(name.ident.as_str(), "read_unaligned" | "write_unaligned")
&& let Some(def_id) = cx.typeck_results().type_dependent_def_id(parent.hir_id)
&& let Some(def_id) = cx.tcx.impl_of_method(def_id)
- && cx.tcx.type_of(def_id).subst_identity().is_unsafe_ptr()
+ && cx.tcx.type_of(def_id).instantiate_identity().is_unsafe_ptr()
{
true
} else {
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs b/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs
index 27cc5a1c3..4d9cc4cac 100644
--- a/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs
@@ -1,10 +1,12 @@
+use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::msrvs::{self, Msrv};
-use clippy_utils::{diagnostics::span_lint_and_then, source};
+use clippy_utils::source;
use if_chain::if_chain;
use rustc_ast::Mutability;
use rustc_hir::{Expr, ExprKind, Node};
use rustc_lint::LateContext;
-use rustc_middle::ty::{self, layout::LayoutOf, Ty, TypeAndMut};
+use rustc_middle::ty::layout::LayoutOf;
+use rustc_middle::ty::{self, Ty, TypeAndMut};
use super::CAST_SLICE_DIFFERENT_SIZES;
diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs b/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs
index 1233c632a..5e0123842 100644
--- a/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs
@@ -4,7 +4,8 @@ use clippy_utils::source::snippet_with_context;
use clippy_utils::{match_def_path, paths};
use if_chain::if_chain;
use rustc_errors::Applicability;
-use rustc_hir::{def_id::DefId, Expr, ExprKind};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};
diff --git a/src/tools/clippy/clippy_lints/src/casts/mod.rs b/src/tools/clippy/clippy_lints/src/casts/mod.rs
index 0ac6ef649..d34de305f 100644
--- a/src/tools/clippy/clippy_lints/src/casts/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/mod.rs
@@ -181,6 +181,14 @@ declare_clippy_lint! {
/// ### Why is this bad?
/// It's just unnecessary.
///
+ /// ### Known problems
+ /// When the expression on the left is a function call, the lint considers the return type to be
+ /// a type alias if it's aliased through a `use` statement
+ /// (like `use std::io::Result as IoResult`). It will not lint such cases.
+ ///
+ /// This check is also rather primitive. It will only work on primitive types without any
+ /// intermediate references; raw pointers and trait objects may or may not work.
+ ///
/// ### Example
/// ```rust
/// let _ = 2i32 as i32;
diff --git a/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs b/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs
index 15ffb00da..181dbcf6e 100644
--- a/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs
@@ -1,9 +1,7 @@
-use std::borrow::Cow;
-
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::msrvs::{self, Msrv};
+use clippy_utils::source::snippet_with_applicability;
use clippy_utils::sugg::Sugg;
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, Mutability, TyKind};
use rustc_lint::LateContext;
@@ -16,33 +14,41 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, msrv: &Msrv) {
return;
}
- if_chain! {
- if let ExprKind::Cast(cast_expr, cast_to_hir_ty) = expr.kind;
- let (cast_from, cast_to) = (cx.typeck_results().expr_ty(cast_expr), cx.typeck_results().expr_ty(expr));
- if let ty::RawPtr(TypeAndMut { mutbl: from_mutbl, .. }) = cast_from.kind();
- if let ty::RawPtr(TypeAndMut { ty: to_pointee_ty, mutbl: to_mutbl }) = cast_to.kind();
- if matches!((from_mutbl, to_mutbl),
- (Mutability::Not, Mutability::Not) | (Mutability::Mut, Mutability::Mut));
+ if let ExprKind::Cast(cast_expr, cast_to_hir_ty) = expr.kind
+ && let (cast_from, cast_to) = (cx.typeck_results().expr_ty(cast_expr), cx.typeck_results().expr_ty(expr))
+ && let ty::RawPtr(TypeAndMut { mutbl: from_mutbl, .. }) = cast_from.kind()
+ && let ty::RawPtr(TypeAndMut { ty: to_pointee_ty, mutbl: to_mutbl }) = cast_to.kind()
+ && matches!((from_mutbl, to_mutbl),
+ (Mutability::Not, Mutability::Not) | (Mutability::Mut, Mutability::Mut))
// The `U` in `pointer::cast` have to be `Sized`
// as explained here: https://github.com/rust-lang/rust/issues/60602.
- if to_pointee_ty.is_sized(cx.tcx, cx.param_env);
- then {
- let mut applicability = Applicability::MachineApplicable;
- let cast_expr_sugg = Sugg::hir_with_applicability(cx, cast_expr, "_", &mut applicability);
- let turbofish = match &cast_to_hir_ty.kind {
- TyKind::Infer => Cow::Borrowed(""),
- TyKind::Ptr(mut_ty) if matches!(mut_ty.ty.kind, TyKind::Infer) => Cow::Borrowed(""),
- _ => Cow::Owned(format!("::<{to_pointee_ty}>")),
- };
- span_lint_and_sugg(
- cx,
- PTR_AS_PTR,
- expr.span,
- "`as` casting between raw pointers without changing its mutability",
- "try `pointer::cast`, a safer alternative",
- format!("{}.cast{turbofish}()", cast_expr_sugg.maybe_par()),
- applicability,
- );
- }
+ && to_pointee_ty.is_sized(cx.tcx, cx.param_env)
+ {
+ let mut app = Applicability::MachineApplicable;
+ let cast_expr_sugg = Sugg::hir_with_applicability(cx, cast_expr, "_", &mut app);
+ let turbofish = match &cast_to_hir_ty.kind {
+ TyKind::Infer => String::new(),
+ TyKind::Ptr(mut_ty) => {
+ if matches!(mut_ty.ty.kind, TyKind::Infer) {
+ String::new()
+ } else {
+ format!(
+ "::<{}>",
+ snippet_with_applicability(cx, mut_ty.ty.span, "/* type */", &mut app)
+ )
+ }
+ },
+ _ => return,
+ };
+
+ span_lint_and_sugg(
+ cx,
+ PTR_AS_PTR,
+ expr.span,
+ "`as` casting between raw pointers without changing its mutability",
+ "try `pointer::cast`, a safer alternative",
+ format!("{}.cast{turbofish}()", cast_expr_sugg.maybe_par()),
+ app,
+ );
}
}
diff --git a/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs b/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs
index f0c1df014..ce1ab1091 100644
--- a/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs
@@ -1,6 +1,6 @@
-use clippy_utils::msrvs::POINTER_CAST_CONSTNESS;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::msrvs::{Msrv, POINTER_CAST_CONSTNESS};
use clippy_utils::sugg::Sugg;
-use clippy_utils::{diagnostics::span_lint_and_sugg, msrvs::Msrv};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, Mutability};
diff --git a/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs b/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs
index 71cf2aea0..86057bb74 100644
--- a/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs
+++ b/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs
@@ -56,7 +56,7 @@ pub(super) fn check<'tcx>(
&format!("casting raw pointers to the same type and constness is unnecessary (`{cast_from}` -> `{cast_to}`)"),
"try",
cast_str.clone(),
- Applicability::MachineApplicable,
+ Applicability::MaybeIncorrect,
);
}
}
@@ -85,11 +85,6 @@ pub(super) fn check<'tcx>(
}
}
- // skip cast of fn call that returns type alias
- if let ExprKind::Cast(inner, ..) = expr.kind && is_cast_from_ty_alias(cx, inner, cast_from) {
- return false;
- }
-
// skip cast to non-primitive type
if_chain! {
if let ExprKind::Cast(_, cast_to) = expr.kind;
@@ -101,6 +96,11 @@ pub(super) fn check<'tcx>(
}
}
+ // skip cast of fn call that returns type alias
+ if let ExprKind::Cast(inner, ..) = expr.kind && is_cast_from_ty_alias(cx, inner, cast_from) {
+ return false;
+ }
+
if let Some(lit) = get_numeric_literal(cast_expr) {
let literal_str = &cast_str;
@@ -254,14 +254,12 @@ fn is_cast_from_ty_alias<'tcx>(cx: &LateContext<'tcx>, expr: impl Visitable<'tcx
// function's declaration snippet is exactly equal to the `Ty`. That way, we can
// see whether it's a type alias.
//
- // Will this work for more complex types? Probably not!
+ // FIXME: This won't work if the type is given an alias through `use`, should we
+ // consider this a type alias as well?
if !snippet
.split("->")
- .skip(0)
- .map(|s| {
- s.trim() == cast_from.to_string()
- || s.split("where").any(|ty| ty.trim() == cast_from.to_string())
- })
+ .skip(1)
+ .map(|s| snippet_eq_ty(s, cast_from) || s.split("where").any(|ty| snippet_eq_ty(ty, cast_from)))
.any(|a| a)
{
return ControlFlow::Break(());
@@ -288,3 +286,7 @@ fn is_cast_from_ty_alias<'tcx>(cx: &LateContext<'tcx>, expr: impl Visitable<'tcx
})
.is_some()
}
+
+fn snippet_eq_ty(snippet: &str, ty: Ty<'_>) -> bool {
+ snippet.trim() == ty.to_string() || snippet.trim().contains(&format!("::{ty}"))
+}
diff --git a/src/tools/clippy/clippy_lints/src/copies.rs b/src/tools/clippy/clippy_lints/src/copies.rs
index 1c321f46e..e3a09636e 100644
--- a/src/tools/clippy/clippy_lints/src/copies.rs
+++ b/src/tools/clippy/clippy_lints/src/copies.rs
@@ -10,8 +10,7 @@ use core::iter;
use core::ops::ControlFlow;
use rustc_errors::Applicability;
use rustc_hir::def_id::DefIdSet;
-use rustc_hir::intravisit;
-use rustc_hir::{BinOpKind, Block, Expr, ExprKind, HirId, HirIdSet, Stmt, StmtKind};
+use rustc_hir::{intravisit, BinOpKind, Block, Expr, ExprKind, HirId, HirIdSet, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::query::Key;
use rustc_session::{declare_tool_lint, impl_lint_pass};
diff --git a/src/tools/clippy/clippy_lints/src/copy_iterator.rs b/src/tools/clippy/clippy_lints/src/copy_iterator.rs
index 0fc115232..5d04ad011 100644
--- a/src/tools/clippy/clippy_lints/src/copy_iterator.rs
+++ b/src/tools/clippy/clippy_lints/src/copy_iterator.rs
@@ -43,7 +43,7 @@ impl<'tcx> LateLintPass<'tcx> for CopyIterator {
of_trait: Some(ref trait_ref),
..
}) = item.kind;
- let ty = cx.tcx.type_of(item.owner_id).subst_identity();
+ let ty = cx.tcx.type_of(item.owner_id).instantiate_identity();
if is_copy(cx, ty);
if let Some(trait_id) = trait_ref.trait_def_id();
if cx.tcx.is_diagnostic_item(sym::Iterator, trait_id);
diff --git a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
index 7436e9ce8..726674d88 100644
--- a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
+++ b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
@@ -5,7 +5,8 @@ use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::{symbol::sym, Span};
+use rustc_span::symbol::sym;
+use rustc_span::Span;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/dbg_macro.rs b/src/tools/clippy/clippy_lints/src/dbg_macro.rs
index ea17e7a60..49452136d 100644
--- a/src/tools/clippy/clippy_lints/src/dbg_macro.rs
+++ b/src/tools/clippy/clippy_lints/src/dbg_macro.rs
@@ -71,7 +71,9 @@ impl DbgMacro {
impl LateLintPass<'_> for DbgMacro {
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
- let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return };
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else {
+ return;
+ };
if cx.tcx.is_diagnostic_item(sym::dbg_macro, macro_call.def_id) {
// allows `dbg!` in test code if allow-dbg-in-test is set to true in clippy.toml
if self.allow_dbg_in_tests
diff --git a/src/tools/clippy/clippy_lints/src/declared_lints.rs b/src/tools/clippy/clippy_lints/src/declared_lints.rs
index 9d9ee6ba3..db114abfc 100644
--- a/src/tools/clippy/clippy_lints/src/declared_lints.rs
+++ b/src/tools/clippy/clippy_lints/src/declared_lints.rs
@@ -37,6 +37,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::utils::internal_lints::produce_ice::PRODUCE_ICE_INFO,
#[cfg(feature = "internal")]
crate::utils::internal_lints::unnecessary_def_path::UNNECESSARY_DEF_PATH_INFO,
+ crate::absolute_paths::ABSOLUTE_PATHS_INFO,
crate::allow_attributes::ALLOW_ATTRIBUTES_INFO,
crate::almost_complete_range::ALMOST_COMPLETE_RANGE_INFO,
crate::approx_const::APPROX_CONSTANT_INFO,
@@ -155,6 +156,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::enum_variants::MODULE_INCEPTION_INFO,
crate::enum_variants::MODULE_NAME_REPETITIONS_INFO,
crate::equatable_if_let::EQUATABLE_IF_LET_INFO,
+ crate::error_impl_error::ERROR_IMPL_ERROR_INFO,
crate::escape::BOXED_LOCAL_INFO,
crate::eta_reduction::REDUNDANT_CLOSURE_INFO,
crate::eta_reduction::REDUNDANT_CLOSURE_FOR_METHOD_CALLS_INFO,
@@ -171,7 +173,6 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::float_literal::LOSSY_FLOAT_LITERAL_INFO,
crate::floating_point_arithmetic::IMPRECISE_FLOPS_INFO,
crate::floating_point_arithmetic::SUBOPTIMAL_FLOPS_INFO,
- crate::fn_null_check::FN_NULL_CHECK_INFO,
crate::format::USELESS_FORMAT_INFO,
crate::format_args::FORMAT_IN_FORMAT_ARGS_INFO,
crate::format_args::TO_STRING_IN_FORMAT_ARGS_INFO,
@@ -184,6 +185,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::formatting::SUSPICIOUS_ASSIGNMENT_FORMATTING_INFO,
crate::formatting::SUSPICIOUS_ELSE_FORMATTING_INFO,
crate::formatting::SUSPICIOUS_UNARY_OP_FORMATTING_INFO,
+ crate::four_forward_slashes::FOUR_FORWARD_SLASHES_INFO,
crate::from_over_into::FROM_OVER_INTO_INFO,
crate::from_raw_with_void_ptr::FROM_RAW_WITH_VOID_PTR_INFO,
crate::from_str_radix_10::FROM_STR_RADIX_10_INFO,
@@ -201,12 +203,14 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::if_let_mutex::IF_LET_MUTEX_INFO,
crate::if_not_else::IF_NOT_ELSE_INFO,
crate::if_then_some_else_none::IF_THEN_SOME_ELSE_NONE_INFO,
+ crate::ignored_unit_patterns::IGNORED_UNIT_PATTERNS_INFO,
crate::implicit_hasher::IMPLICIT_HASHER_INFO,
crate::implicit_return::IMPLICIT_RETURN_INFO,
crate::implicit_saturating_add::IMPLICIT_SATURATING_ADD_INFO,
crate::implicit_saturating_sub::IMPLICIT_SATURATING_SUB_INFO,
crate::inconsistent_struct_constructor::INCONSISTENT_STRUCT_CONSTRUCTOR_INFO,
crate::incorrect_impls::INCORRECT_CLONE_IMPL_ON_COPY_TYPE_INFO,
+ crate::incorrect_impls::INCORRECT_PARTIAL_ORD_IMPL_ON_ORD_TYPE_INFO,
crate::index_refutable_slice::INDEX_REFUTABLE_SLICE_INFO,
crate::indexing_slicing::INDEXING_SLICING_INFO,
crate::indexing_slicing::OUT_OF_BOUNDS_INDEXING_INFO,
@@ -273,6 +277,8 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::manual_async_fn::MANUAL_ASYNC_FN_INFO,
crate::manual_bits::MANUAL_BITS_INFO,
crate::manual_clamp::MANUAL_CLAMP_INFO,
+ crate::manual_float_methods::MANUAL_IS_FINITE_INFO,
+ crate::manual_float_methods::MANUAL_IS_INFINITE_INFO,
crate::manual_is_ascii_check::MANUAL_IS_ASCII_CHECK_INFO,
crate::manual_let_else::MANUAL_LET_ELSE_INFO,
crate::manual_main_separator_str::MANUAL_MAIN_SEPARATOR_STR_INFO,
@@ -303,6 +309,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::matches::MATCH_WILDCARD_FOR_SINGLE_VARIANTS_INFO,
crate::matches::MATCH_WILD_ERR_ARM_INFO,
crate::matches::NEEDLESS_MATCH_INFO,
+ crate::matches::REDUNDANT_GUARDS_INFO,
crate::matches::REDUNDANT_PATTERN_MATCHING_INFO,
crate::matches::REST_PAT_IN_FULLY_BOUND_STRUCTS_INFO,
crate::matches::SIGNIFICANT_DROP_IN_SCRUTINEE_INFO,
@@ -331,11 +338,13 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::methods::EXPECT_USED_INFO,
crate::methods::EXTEND_WITH_DRAIN_INFO,
crate::methods::FILETYPE_IS_FILE_INFO,
+ crate::methods::FILTER_MAP_BOOL_THEN_INFO,
crate::methods::FILTER_MAP_IDENTITY_INFO,
crate::methods::FILTER_MAP_NEXT_INFO,
crate::methods::FILTER_NEXT_INFO,
crate::methods::FLAT_MAP_IDENTITY_INFO,
crate::methods::FLAT_MAP_OPTION_INFO,
+ crate::methods::FORMAT_COLLECT_INFO,
crate::methods::FROM_ITER_INSTEAD_OF_COLLECT_INFO,
crate::methods::GET_FIRST_INFO,
crate::methods::GET_LAST_WITH_LEN_INFO,
@@ -356,6 +365,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::methods::ITER_ON_SINGLE_ITEMS_INFO,
crate::methods::ITER_OVEREAGER_CLONED_INFO,
crate::methods::ITER_SKIP_NEXT_INFO,
+ crate::methods::ITER_SKIP_ZERO_INFO,
crate::methods::ITER_WITH_DRAIN_INFO,
crate::methods::MANUAL_FILTER_MAP_INFO,
crate::methods::MANUAL_FIND_MAP_INFO,
@@ -389,6 +399,8 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::methods::OR_THEN_UNWRAP_INFO,
crate::methods::PATH_BUF_PUSH_OVERWRITE_INFO,
crate::methods::RANGE_ZIP_WITH_LEN_INFO,
+ crate::methods::READONLY_WRITE_LOCK_INFO,
+ crate::methods::READ_LINE_WITHOUT_TRIM_INFO,
crate::methods::REPEAT_ONCE_INFO,
crate::methods::RESULT_MAP_OR_INTO_OPTION_INFO,
crate::methods::SEARCH_IS_SOME_INFO,
@@ -400,10 +412,12 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::methods::SKIP_WHILE_NEXT_INFO,
crate::methods::STABLE_SORT_PRIMITIVE_INFO,
crate::methods::STRING_EXTEND_CHARS_INFO,
+ crate::methods::STRING_LIT_CHARS_ANY_INFO,
crate::methods::SUSPICIOUS_COMMAND_ARG_SPACE_INFO,
crate::methods::SUSPICIOUS_MAP_INFO,
crate::methods::SUSPICIOUS_SPLITN_INFO,
crate::methods::SUSPICIOUS_TO_OWNED_INFO,
+ crate::methods::TYPE_ID_ON_BOX_INFO,
crate::methods::UNINIT_ASSUMED_INIT_INFO,
crate::methods::UNIT_HASH_INFO,
crate::methods::UNNECESSARY_FILTER_MAP_INFO,
@@ -414,7 +428,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::methods::UNNECESSARY_LITERAL_UNWRAP_INFO,
crate::methods::UNNECESSARY_SORT_BY_INFO,
crate::methods::UNNECESSARY_TO_OWNED_INFO,
- crate::methods::UNWRAP_OR_ELSE_DEFAULT_INFO,
+ crate::methods::UNWRAP_OR_DEFAULT_INFO,
crate::methods::UNWRAP_USED_INFO,
crate::methods::USELESS_ASREF_INFO,
crate::methods::VEC_RESIZE_TO_ZERO_INFO,
@@ -469,6 +483,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::needless_if::NEEDLESS_IF_INFO,
crate::needless_late_init::NEEDLESS_LATE_INIT_INFO,
crate::needless_parens_on_range_literals::NEEDLESS_PARENS_ON_RANGE_LITERALS_INFO,
+ crate::needless_pass_by_ref_mut::NEEDLESS_PASS_BY_REF_MUT_INFO,
crate::needless_pass_by_value::NEEDLESS_PASS_BY_VALUE_INFO,
crate::needless_question_mark::NEEDLESS_QUESTION_MARK_INFO,
crate::needless_update::NEEDLESS_UPDATE_INFO,
@@ -503,6 +518,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::operators::FLOAT_CMP_CONST_INFO,
crate::operators::FLOAT_EQUALITY_WITHOUT_ABS_INFO,
crate::operators::IDENTITY_OP_INFO,
+ crate::operators::IMPOSSIBLE_COMPARISONS_INFO,
crate::operators::INEFFECTIVE_BIT_MASK_INFO,
crate::operators::INTEGER_DIVISION_INFO,
crate::operators::MISREFACTORED_ASSIGN_OP_INFO,
@@ -511,6 +527,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::operators::NEEDLESS_BITWISE_BOOL_INFO,
crate::operators::OP_REF_INFO,
crate::operators::PTR_EQ_INFO,
+ crate::operators::REDUNDANT_COMPARISONS_INFO,
crate::operators::SELF_ASSIGNMENT_INFO,
crate::operators::VERBOSE_BIT_MASK_INFO,
crate::option_env_unwrap::OPTION_ENV_UNWRAP_INFO,
@@ -550,6 +567,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::redundant_closure_call::REDUNDANT_CLOSURE_CALL_INFO,
crate::redundant_else::REDUNDANT_ELSE_INFO,
crate::redundant_field_names::REDUNDANT_FIELD_NAMES_INFO,
+ crate::redundant_locals::REDUNDANT_LOCALS_INFO,
crate::redundant_pub_crate::REDUNDANT_PUB_CRATE_INFO,
crate::redundant_slicing::DEREF_BY_SLICING_INFO,
crate::redundant_slicing::REDUNDANT_SLICING_INFO,
@@ -563,6 +581,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::return_self_not_must_use::RETURN_SELF_NOT_MUST_USE_INFO,
crate::returns::LET_AND_RETURN_INFO,
crate::returns::NEEDLESS_RETURN_INFO,
+ crate::returns::NEEDLESS_RETURN_WITH_QUESTION_MARK_INFO,
crate::same_name_method::SAME_NAME_METHOD_INFO,
crate::self_named_constructors::SELF_NAMED_CONSTRUCTORS_INFO,
crate::semicolon_block::SEMICOLON_INSIDE_BLOCK_INFO,
diff --git a/src/tools/clippy/clippy_lints/src/default.rs b/src/tools/clippy/clippy_lints/src/default.rs
index 80c22742b..763ad0264 100644
--- a/src/tools/clippy/clippy_lints/src/default.rs
+++ b/src/tools/clippy/clippy_lints/src/default.rs
@@ -150,7 +150,7 @@ impl<'tcx> LateLintPass<'tcx> for Default {
.fields
.iter()
.all(|field| {
- is_copy(cx, cx.tcx.type_of(field.did).subst_identity())
+ is_copy(cx, cx.tcx.type_of(field.did).instantiate_identity())
});
if !has_drop(cx, binding_type) || all_fields_are_copy;
then {
@@ -219,11 +219,11 @@ impl<'tcx> LateLintPass<'tcx> for Default {
// give correct suggestion if generics are involved (see #6944)
let binding_type = if_chain! {
- if let ty::Adt(adt_def, substs) = binding_type.kind();
- if !substs.is_empty();
+ if let ty::Adt(adt_def, args) = binding_type.kind();
+ if !args.is_empty();
then {
let adt_def_ty_name = cx.tcx.item_name(adt_def.did());
- let generic_args = substs.iter().collect::<Vec<_>>();
+ let generic_args = args.iter().collect::<Vec<_>>();
let tys_str = generic_args
.iter()
.map(ToString::to_string)
diff --git a/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs b/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs
index ca9514ccc..a294c6937 100644
--- a/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs
+++ b/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs
@@ -1,5 +1,7 @@
-use clippy_utils::{diagnostics::span_lint_and_sugg, is_ty_alias, match_def_path, paths};
-use hir::{def::Res, ExprKind};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::{is_ty_alias, match_def_path, paths};
+use hir::def::Res;
+use hir::ExprKind;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
diff --git a/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs b/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs
index f296b80d2..572990aab 100644
--- a/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs
+++ b/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::last_path_segment;
use clippy_utils::source::snippet_with_context;
-use clippy_utils::{match_def_path, paths};
+use clippy_utils::{last_path_segment, match_def_path, paths};
use rustc_errors::Applicability;
use rustc_hir::{def, Expr, ExprKind, GenericArg, QPath, TyKind};
use rustc_lint::{LateContext, LateLintPass};
diff --git a/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs
index e53a9877b..d09428dbc 100644
--- a/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs
+++ b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs
@@ -4,15 +4,11 @@ use clippy_utils::{get_parent_node, numeric_literal};
use if_chain::if_chain;
use rustc_ast::ast::{LitFloatType, LitIntType, LitKind};
use rustc_errors::Applicability;
-use rustc_hir::{
- intravisit::{walk_expr, walk_stmt, Visitor},
- Body, Expr, ExprKind, HirId, ItemKind, Lit, Node, Stmt, StmtKind,
-};
+use rustc_hir::intravisit::{walk_expr, walk_stmt, Visitor};
+use rustc_hir::{Body, Expr, ExprKind, HirId, ItemKind, Lit, Node, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_middle::{
- lint::in_external_macro,
- ty::{self, FloatTy, IntTy, PolyFnSig, Ty},
-};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::{self, FloatTy, IntTy, PolyFnSig, Ty};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use std::iter;
@@ -141,7 +137,7 @@ impl<'a, 'tcx> Visitor<'tcx> for NumericFallbackVisitor<'a, 'tcx> {
ExprKind::MethodCall(_, receiver, args, _) => {
if let Some(def_id) = self.cx.typeck_results().type_dependent_def_id(expr.hir_id) {
- let fn_sig = self.cx.tcx.fn_sig(def_id).subst_identity().skip_binder();
+ let fn_sig = self.cx.tcx.fn_sig(def_id).instantiate_identity().skip_binder();
for (expr, bound) in iter::zip(std::iter::once(*receiver).chain(args.iter()), fn_sig.inputs()) {
self.ty_bounds.push((*bound).into());
self.visit_expr(expr);
@@ -167,7 +163,7 @@ impl<'a, 'tcx> Visitor<'tcx> for NumericFallbackVisitor<'a, 'tcx> {
.iter()
.find_map(|f_def| {
if f_def.ident(self.cx.tcx) == field.ident
- { Some(self.cx.tcx.type_of(f_def.did).subst_identity()) }
+ { Some(self.cx.tcx.type_of(f_def.did).instantiate_identity()) }
else { None }
});
self.ty_bounds.push(bound.into());
@@ -213,9 +209,9 @@ impl<'a, 'tcx> Visitor<'tcx> for NumericFallbackVisitor<'a, 'tcx> {
fn fn_sig_opt<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<PolyFnSig<'tcx>> {
let node_ty = cx.typeck_results().node_type_opt(hir_id)?;
- // We can't use `Ty::fn_sig` because it automatically performs substs, this may result in FNs.
+ // We can't use `Ty::fn_sig` because it automatically instantiates args; this may result in false negatives.
match node_ty.kind() {
- ty::FnDef(def_id, _) => Some(cx.tcx.fn_sig(*def_id).subst_identity()),
+ ty::FnDef(def_id, _) => Some(cx.tcx.fn_sig(*def_id).instantiate_identity()),
ty::FnPtr(fn_sig) => Some(*fn_sig),
_ => None,
}
diff --git a/src/tools/clippy/clippy_lints/src/dereference.rs b/src/tools/clippy/clippy_lints/src/dereference.rs
index 12f2f37e3..58c278550 100644
--- a/src/tools/clippy/clippy_lints/src/dereference.rs
+++ b/src/tools/clippy/clippy_lints/src/dereference.rs
@@ -3,22 +3,23 @@ use clippy_utils::mir::{enclosing_mir, expr_local, local_assignments, used_exact
use clippy_utils::msrvs::{self, Msrv};
use clippy_utils::source::{snippet_with_applicability, snippet_with_context};
use clippy_utils::sugg::has_enclosing_paren;
-use clippy_utils::ty::{adt_and_variant_of_res, expr_sig, is_copy, peel_mid_ty_refs, ty_sig};
+use clippy_utils::ty::{is_copy, peel_mid_ty_refs};
use clippy_utils::{
- fn_def_id, get_parent_expr, get_parent_expr_for_hir, is_lint_allowed, path_to_local, walk_to_expr_usage,
+ expr_use_ctxt, get_parent_expr, get_parent_node, is_lint_allowed, path_to_local, DefinedTy, ExprUseNode,
};
+use hir::def::DefKind;
+use hir::MatchSource;
use rustc_ast::util::parser::{PREC_POSTFIX, PREC_PREFIX};
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::graph::iterate::{CycleDetector, TriColorDepthFirstSearch};
use rustc_errors::Applicability;
+use rustc_hir::def::Res;
+use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::{walk_ty, Visitor};
use rustc_hir::{
- self as hir,
- def_id::{DefId, LocalDefId},
- BindingAnnotation, Body, BodyId, BorrowKind, Closure, Expr, ExprKind, FnRetTy, GenericArg, HirId, ImplItem,
- ImplItemKind, Item, ItemKind, Local, MatchSource, Mutability, Node, Pat, PatKind, Path, QPath, TraitItem,
- TraitItemKind, TyKind, UnOp,
+ self as hir, BindingAnnotation, Body, BodyId, BorrowKind, Expr, ExprKind, HirId, Mutability, Node, Pat, PatKind,
+ Path, QPath, TyKind, UnOp,
};
use rustc_index::bit_set::BitSet;
use rustc_infer::infer::TyCtxtInferExt;
@@ -26,13 +27,15 @@ use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir::{Rvalue, StatementKind};
use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, AutoBorrowMutability};
use rustc_middle::ty::{
- self, Binder, BoundVariableKind, ClauseKind, EarlyBinder, FnSig, GenericArgKind, List, ParamEnv, ParamTy,
- ProjectionPredicate, Ty, TyCtxt, TypeVisitableExt, TypeckResults,
+ self, ClauseKind, EarlyBinder, FnSig, GenericArg, GenericArgKind, List, ParamEnv, ParamTy, ProjectionPredicate, Ty,
+ TyCtxt, TypeVisitableExt, TypeckResults,
};
use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::{symbol::sym, Span, Symbol};
+use rustc_span::symbol::sym;
+use rustc_span::{Span, Symbol};
use rustc_trait_selection::infer::InferCtxtExt as _;
-use rustc_trait_selection::traits::{query::evaluate_obligation::InferCtxtExt as _, Obligation, ObligationCause};
+use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _;
+use rustc_trait_selection::traits::{Obligation, ObligationCause};
use std::collections::VecDeque;
declare_clippy_lint! {
@@ -77,6 +80,11 @@ declare_clippy_lint! {
/// Suggests that the receiver of the expression borrows
/// the expression.
///
+ /// ### Known problems
+ /// The lint cannot tell when the implementations of a trait
+ /// for `&T` and `T` do different things. Removing a borrow
+ /// in such a case can change the semantics of the code.
+ ///
/// ### Example
/// ```rust
/// fn fun(_a: &i32) {}
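A hypothetical illustration of the Known problems note added above (the trait and names are made up): when a trait is implemented differently for `T` and `&T`, removing a borrow changes which impl a generic call resolves to:

```rust
// Hypothetical example of the caveat: `Describe` behaves differently for
// `i32` and `&i32`, so dropping the `&` changes the program's behaviour.
trait Describe {
    fn describe(self) -> &'static str;
}

impl Describe for i32 {
    fn describe(self) -> &'static str {
        "owned i32"
    }
}

impl<'a> Describe for &'a i32 {
    fn describe(self) -> &'static str {
        "borrowed i32"
    }
}

fn show<T: Describe>(value: T) -> &'static str {
    value.describe()
}

fn main() {
    let x = 1;
    assert_eq!(show(&x), "borrowed i32");
    assert_eq!(show(x), "owned i32"); // removing the borrow selects the other impl
}
```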
@@ -157,7 +165,7 @@ impl_lint_pass!(Dereferencing<'_> => [
#[derive(Default)]
pub struct Dereferencing<'tcx> {
- state: Option<(State, StateData)>,
+ state: Option<(State, StateData<'tcx>)>,
// While parsing a `deref` method call in ufcs form, the path to the function is itself an
// expression. This is to store the id of that expression so it can be skipped when
@@ -197,29 +205,28 @@ impl<'tcx> Dereferencing<'tcx> {
}
#[derive(Debug)]
-struct StateData {
+struct StateData<'tcx> {
/// Span of the top level expression
span: Span,
hir_id: HirId,
- position: Position,
+ adjusted_ty: Ty<'tcx>,
}
-#[derive(Debug)]
struct DerefedBorrow {
count: usize,
msg: &'static str,
- snip_expr: Option<HirId>,
+ stability: TyCoercionStability,
+ for_field_access: Option<Symbol>,
}
-#[derive(Debug)]
enum State {
// Any number of deref method calls.
DerefMethod {
// The number of calls in a sequence which changed the referenced type
ty_changed_count: usize,
- is_final_ufcs: bool,
+ is_ufcs: bool,
/// The required mutability
- target_mut: Mutability,
+ mutbl: Mutability,
},
DerefedBorrow(DerefedBorrow),
ExplicitDeref {
@@ -238,7 +245,7 @@ enum State {
// A reference operation considered by this lint pass
enum RefOp {
- Method(Mutability),
+ Method { mutbl: Mutability, is_ufcs: bool },
Deref,
AddrOf(Mutability),
}
@@ -288,48 +295,115 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
match (self.state.take(), kind) {
(None, kind) => {
let expr_ty = typeck.expr_ty(expr);
- let (position, adjustments) = walk_parents(cx, &mut self.possible_borrowers, expr, &self.msrv);
- match kind {
- RefOp::Deref => {
+ let use_cx = expr_use_ctxt(cx, expr);
+ let adjusted_ty = match &use_cx {
+ Some(use_cx) => match use_cx.adjustments {
+ [.., a] => a.target,
+ _ => expr_ty,
+ },
+ _ => typeck.expr_ty_adjusted(expr),
+ };
+
+ match (use_cx, kind) {
+ (Some(use_cx), RefOp::Deref) => {
let sub_ty = typeck.expr_ty(sub_expr);
- if let Position::FieldAccess {
- name,
- of_union: false,
- } = position
- && !ty_contains_field(sub_ty, name)
+ if let ExprUseNode::FieldAccess(name) = use_cx.node
+ && adjusted_ty.ty_adt_def().map_or(true, |adt| !adt.is_union())
+ && !ty_contains_field(sub_ty, name.name)
{
self.state = Some((
- State::ExplicitDerefField { name },
- StateData { span: expr.span, hir_id: expr.hir_id, position },
+ State::ExplicitDerefField { name: name.name },
+ StateData {
+ span: expr.span,
+ hir_id: expr.hir_id,
+ adjusted_ty,
+ },
));
- } else if position.is_deref_stable() && sub_ty.is_ref() {
+ } else if sub_ty.is_ref()
+ // Linting method receivers would require verifying that name lookup
+ // would resolve the same way. This is complicated by trait methods.
+ && !use_cx.node.is_recv()
+ && let Some(ty) = use_cx.node.defined_ty(cx)
+ && TyCoercionStability::for_defined_ty(cx, ty, use_cx.node.is_return()).is_deref_stable()
+ {
self.state = Some((
State::ExplicitDeref { mutability: None },
- StateData { span: expr.span, hir_id: expr.hir_id, position },
+ StateData {
+ span: expr.span,
+ hir_id: expr.hir_id,
+ adjusted_ty,
+ },
));
}
},
- RefOp::Method(target_mut)
+ (_, RefOp::Method { mutbl, is_ufcs })
if !is_lint_allowed(cx, EXPLICIT_DEREF_METHODS, expr.hir_id)
- && position.lint_explicit_deref() =>
+ // Allow explicit deref in method chains. e.g. `foo.deref().bar()`
+ && (is_ufcs || !in_postfix_position(cx, expr)) =>
{
let ty_changed_count = usize::from(!deref_method_same_type(expr_ty, typeck.expr_ty(sub_expr)));
self.state = Some((
State::DerefMethod {
ty_changed_count,
- is_final_ufcs: matches!(expr.kind, ExprKind::Call(..)),
- target_mut,
+ is_ufcs,
+ mutbl,
},
StateData {
span: expr.span,
hir_id: expr.hir_id,
- position,
+ adjusted_ty,
},
));
},
- RefOp::AddrOf(mutability) => {
+ (Some(use_cx), RefOp::AddrOf(mutability)) => {
+ let defined_ty = use_cx.node.defined_ty(cx);
+
+ // Check needless_borrow for generic arguments.
+ if !use_cx.is_ty_unified
+ && let Some(DefinedTy::Mir(ty)) = defined_ty
+ && let ty::Param(ty) = *ty.value.skip_binder().kind()
+ && let Some((hir_id, fn_id, i)) = match use_cx.node {
+ ExprUseNode::MethodArg(_, _, 0) => None,
+ ExprUseNode::MethodArg(hir_id, None, i) => {
+ typeck.type_dependent_def_id(hir_id).map(|id| (hir_id, id, i))
+ },
+ ExprUseNode::FnArg(&Expr { kind: ExprKind::Path(ref p), hir_id, .. }, i)
+ if !path_has_args(p) => match typeck.qpath_res(p, hir_id) {
+ Res::Def(DefKind::Fn | DefKind::Ctor(..) | DefKind::AssocFn, id) => {
+ Some((hir_id, id, i))
+ },
+ _ => None,
+ },
+ _ => None,
+ } && let count = needless_borrow_generic_arg_count(
+ cx,
+ &mut self.possible_borrowers,
+ fn_id,
+ typeck.node_args(hir_id),
+ i,
+ ty,
+ expr,
+ &self.msrv,
+ ) && count != 0
+ {
+ self.state = Some((
+ State::DerefedBorrow(DerefedBorrow {
+ count: count - 1,
+ msg: "the borrowed expression implements the required traits",
+ stability: TyCoercionStability::None,
+ for_field_access: None,
+ }),
+ StateData {
+ span: expr.span,
+ hir_id: expr.hir_id,
+ adjusted_ty: use_cx.adjustments.last().map_or(expr_ty, |a| a.target),
+ },
+ ));
+ return;
+ }
+
// Find the number of times the borrow is auto-derefed.
- let mut iter = adjustments.iter();
+ let mut iter = use_cx.adjustments.iter();
let mut deref_count = 0usize;
let next_adjust = loop {
match iter.next() {
@@ -346,6 +420,58 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
};
};
+ let stability = defined_ty.map_or(TyCoercionStability::None, |ty| {
+ TyCoercionStability::for_defined_ty(cx, ty, use_cx.node.is_return())
+ });
+ let can_auto_borrow = match use_cx.node {
+ ExprUseNode::Callee => true,
+ ExprUseNode::FieldAccess(_) => adjusted_ty.ty_adt_def().map_or(true, |adt| !adt.is_union()),
+ ExprUseNode::MethodArg(hir_id, _, 0) if !use_cx.moved_before_use => {
+ // Check for calls to trait methods where the trait is implemented
+ // on a reference.
+ // Two cases need to be handled:
+ // * `self` methods on `&T` will never have auto-borrow
+ // * `&self` methods on `&T` can have auto-borrow, but `&self` methods on `T` will take
+ // priority.
+ if let Some(fn_id) = typeck.type_dependent_def_id(hir_id)
+ && let Some(trait_id) = cx.tcx.trait_of_item(fn_id)
+ && let arg_ty
+ = cx.tcx.erase_regions(use_cx.adjustments.last().map_or(expr_ty, |a| a.target))
+ && let ty::Ref(_, sub_ty, _) = *arg_ty.kind()
+ && let args = cx
+ .typeck_results()
+ .node_args_opt(hir_id).map(|args| &args[1..]).unwrap_or_default()
+ && let impl_ty = if cx.tcx.fn_sig(fn_id)
+ .instantiate_identity()
+ .skip_binder()
+ .inputs()[0].is_ref()
+ {
+ // Trait methods taking `&self`
+ sub_ty
+ } else {
+ // Trait methods taking `self`
+ arg_ty
+ } && impl_ty.is_ref()
+ && cx.tcx.infer_ctxt().build()
+ .type_implements_trait(
+ trait_id,
+ [impl_ty.into()].into_iter().chain(args.iter().copied()),
+ cx.param_env,
+ )
+ .must_apply_modulo_regions()
+ {
+ false
+ } else {
+ true
+ }
+ },
+ _ => false,
+ };
+
+ let deref_msg =
+ "this expression creates a reference which is immediately dereferenced by the compiler";
+ let borrow_msg = "this expression borrows a value the compiler would automatically borrow";
+
// Determine the required number of references before any can be removed. In all cases the
// reference made by the current expression will be removed. After that there are four cases to
// handle.
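
A minimal sketch of the receiver-priority rule described in the comment above: a `&self` method on `T` wins over one on `&T`, so the borrow on the receiver is removable, while the `&T` implementation stays reachable only through a qualified call. `Speak` is a made-up trait, not from this patch.

```rust
trait Speak {
    fn speak(&self) -> &'static str;
}

impl Speak for String {
    fn speak(&self) -> &'static str { "impl on String" }
}

impl Speak for &String {
    fn speak(&self) -> &'static str { "impl on &String" }
}

fn main() {
    let s = String::new();
    // Even with an explicit borrow, the `&self` method on `String` wins,
    // so the `&` on the receiver is removable without changing behaviour:
    assert_eq!((&s).speak(), "impl on String");
    assert_eq!(s.speak(), "impl on String");
    // The `&String` implementation is only reachable by naming it:
    assert_eq!(Speak::speak(&&s), "impl on &String");
}
```
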
@@ -368,26 +494,18 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
// };
// }
// ```
- let deref_msg =
- "this expression creates a reference which is immediately dereferenced by the compiler";
- let borrow_msg = "this expression borrows a value the compiler would automatically borrow";
- let impl_msg = "the borrowed expression implements the required traits";
-
- let (required_refs, msg, snip_expr) = if position.can_auto_borrow() {
- (1, if deref_count == 1 { borrow_msg } else { deref_msg }, None)
- } else if let Position::ImplArg(hir_id) = position {
- (0, impl_msg, Some(hir_id))
- } else if let Some(&Adjust::Borrow(AutoBorrow::Ref(_, mutability))) =
- next_adjust.map(|a| &a.kind)
+ let (required_refs, msg) = if can_auto_borrow {
+ (1, if deref_count == 1 { borrow_msg } else { deref_msg })
+ } else if let Some(&Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(_, mutability)),
+ ..
+ }) = next_adjust
+ && matches!(mutability, AutoBorrowMutability::Mut { .. })
+ && !stability.is_reborrow_stable()
{
- if matches!(mutability, AutoBorrowMutability::Mut { .. }) && !position.is_reborrow_stable()
- {
- (3, deref_msg, None)
- } else {
- (2, deref_msg, None)
- }
+ (3, deref_msg)
} else {
- (2, deref_msg, None)
+ (2, deref_msg)
};
if deref_count >= required_refs {
@@ -397,15 +515,19 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
// can't be removed without breaking the code. See earlier comment.
count: deref_count - required_refs,
msg,
- snip_expr,
+ stability,
+ for_field_access: match use_cx.node {
+ ExprUseNode::FieldAccess(name) => Some(name.name),
+ _ => None,
+ },
}),
StateData {
span: expr.span,
hir_id: expr.hir_id,
- position,
+ adjusted_ty: use_cx.adjustments.last().map_or(expr_ty, |a| a.target),
},
));
- } else if position.is_deref_stable()
+ } else if stability.is_deref_stable()
// Auto-deref doesn't combine with other adjustments
&& next_adjust.map_or(true, |a| matches!(a.kind, Adjust::Deref(_) | Adjust::Borrow(_)))
&& iter.all(|a| matches!(a.kind, Adjust::Deref(_) | Adjust::Borrow(_)))
@@ -415,24 +537,24 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
StateData {
span: expr.span,
hir_id: expr.hir_id,
- position,
+ adjusted_ty: use_cx.adjustments.last().map_or(expr_ty, |a| a.target),
},
));
}
},
- RefOp::Method(..) => (),
+ (None, _) | (_, RefOp::Method { .. }) => (),
}
},
(
Some((
State::DerefMethod {
- target_mut,
+ mutbl,
ty_changed_count,
..
},
data,
)),
- RefOp::Method(_),
+ RefOp::Method { is_ufcs, .. },
) => {
self.state = Some((
State::DerefMethod {
@@ -441,8 +563,8 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
} else {
ty_changed_count + 1
},
- is_final_ufcs: matches!(expr.kind, ExprKind::Call(..)),
- target_mut,
+ is_ufcs,
+ mutbl,
},
data,
));
@@ -457,33 +579,44 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
));
},
(Some((State::DerefedBorrow(state), data)), RefOp::AddrOf(mutability)) => {
- let position = data.position;
+ let adjusted_ty = data.adjusted_ty;
+ let stability = state.stability;
report(cx, expr, State::DerefedBorrow(state), data);
- if position.is_deref_stable() {
+ if stability.is_deref_stable() {
self.state = Some((
State::Borrow { mutability },
StateData {
span: expr.span,
hir_id: expr.hir_id,
- position,
+ adjusted_ty,
},
));
}
},
(Some((State::DerefedBorrow(state), data)), RefOp::Deref) => {
- let position = data.position;
+ let adjusted_ty = data.adjusted_ty;
+ let stability = state.stability;
+ let for_field_access = state.for_field_access;
report(cx, expr, State::DerefedBorrow(state), data);
- if let Position::FieldAccess{name, ..} = position
+ if let Some(name) = for_field_access
&& !ty_contains_field(typeck.expr_ty(sub_expr), name)
{
self.state = Some((
State::ExplicitDerefField { name },
- StateData { span: expr.span, hir_id: expr.hir_id, position },
+ StateData {
+ span: expr.span,
+ hir_id: expr.hir_id,
+ adjusted_ty,
+ },
));
- } else if position.is_deref_stable() {
+ } else if stability.is_deref_stable() {
self.state = Some((
State::ExplicitDeref { mutability: None },
- StateData { span: expr.span, hir_id: expr.hir_id, position },
+ StateData {
+ span: expr.span,
+ hir_id: expr.hir_id,
+ adjusted_ty,
+ },
));
}
},
@@ -589,7 +722,7 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> {
pat.spans,
"this pattern creates a reference to a reference",
|diag| {
- diag.multipart_suggestion("try this", replacements, app);
+ diag.multipart_suggestion("try", replacements, app);
},
);
}
@@ -605,8 +738,8 @@ fn try_parse_ref_op<'tcx>(
typeck: &'tcx TypeckResults<'_>,
expr: &'tcx Expr<'_>,
) -> Option<(RefOp, &'tcx Expr<'tcx>)> {
- let (def_id, arg) = match expr.kind {
- ExprKind::MethodCall(_, arg, [], _) => (typeck.type_dependent_def_id(expr.hir_id)?, arg),
+ let (is_ufcs, def_id, arg) = match expr.kind {
+ ExprKind::MethodCall(_, arg, [], _) => (false, typeck.type_dependent_def_id(expr.hir_id)?, arg),
ExprKind::Call(
Expr {
kind: ExprKind::Path(path),
@@ -614,7 +747,7 @@ fn try_parse_ref_op<'tcx>(
..
},
[arg],
- ) => (typeck.qpath_res(path, *hir_id).opt_def_id()?, arg),
+ ) => (true, typeck.qpath_res(path, *hir_id).opt_def_id()?, arg),
ExprKind::Unary(UnOp::Deref, sub_expr) if !typeck.expr_ty(sub_expr).is_unsafe_ptr() => {
return Some((RefOp::Deref, sub_expr));
},
@@ -622,9 +755,21 @@ fn try_parse_ref_op<'tcx>(
_ => return None,
};
if tcx.is_diagnostic_item(sym::deref_method, def_id) {
- Some((RefOp::Method(Mutability::Not), arg))
+ Some((
+ RefOp::Method {
+ mutbl: Mutability::Not,
+ is_ufcs,
+ },
+ arg,
+ ))
} else if tcx.trait_of_item(def_id)? == tcx.lang_items().deref_mut_trait()? {
- Some((RefOp::Method(Mutability::Mut), arg))
+ Some((
+ RefOp::Method {
+ mutbl: Mutability::Mut,
+ is_ufcs,
+ },
+ arg,
+ ))
} else {
None
}
@@ -643,420 +788,165 @@ fn deref_method_same_type<'tcx>(result_ty: Ty<'tcx>, arg_ty: Ty<'tcx>) -> bool {
}
}
-/// The position of an expression relative to it's parent.
-#[derive(Clone, Copy, Debug)]
-enum Position {
- MethodReceiver,
- /// The method is defined on a reference type. e.g. `impl Foo for &T`
- MethodReceiverRefImpl,
- Callee,
- ImplArg(HirId),
- FieldAccess {
- name: Symbol,
- of_union: bool,
- }, // union fields cannot be auto borrowed
- Postfix,
- Deref,
- /// Any other location which will trigger auto-deref to a specific time.
- /// Contains the precedence of the parent expression and whether the target type is sized.
- DerefStable(i8, bool),
- /// Any other location which will trigger auto-reborrowing.
- /// Contains the precedence of the parent expression.
- ReborrowStable(i8),
- /// Contains the precedence of the parent expression.
- Other(i8),
-}
-impl Position {
- fn is_deref_stable(self) -> bool {
- matches!(self, Self::DerefStable(..))
+fn path_has_args(p: &QPath<'_>) -> bool {
+ match *p {
+ QPath::Resolved(_, Path { segments: [.., s], .. }) | QPath::TypeRelative(_, s) => s.args.is_some(),
+ _ => false,
}
+}
- fn is_reborrow_stable(self) -> bool {
- matches!(self, Self::DerefStable(..) | Self::ReborrowStable(_))
+fn in_postfix_position<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'tcx>) -> bool {
+ if let Some(parent) = get_parent_expr(cx, e)
+ && parent.span.ctxt() == e.span.ctxt()
+ {
+ match parent.kind {
+ ExprKind::Call(child, _) | ExprKind::MethodCall(_, child, _, _) | ExprKind::Index(child, _, _)
+ if child.hir_id == e.hir_id => true,
+ ExprKind::Match(.., MatchSource::TryDesugar(_) | MatchSource::AwaitDesugar)
+ | ExprKind::Field(_, _) => true,
+ _ => false,
+ }
+ } else {
+ false
}
+}
- fn can_auto_borrow(self) -> bool {
- matches!(
- self,
- Self::MethodReceiver | Self::FieldAccess { of_union: false, .. } | Self::Callee
- )
+#[derive(Clone, Copy)]
+enum TyCoercionStability {
+ Deref,
+ Reborrow,
+ None,
+}
+impl TyCoercionStability {
+ fn is_deref_stable(self) -> bool {
+ matches!(self, Self::Deref)
}
- fn lint_explicit_deref(self) -> bool {
- matches!(self, Self::Other(_) | Self::DerefStable(..) | Self::ReborrowStable(_))
+ fn is_reborrow_stable(self) -> bool {
+ matches!(self, Self::Deref | Self::Reborrow)
}
- fn precedence(self) -> i8 {
- match self {
- Self::MethodReceiver
- | Self::MethodReceiverRefImpl
- | Self::Callee
- | Self::FieldAccess { .. }
- | Self::Postfix => PREC_POSTFIX,
- Self::ImplArg(_) | Self::Deref => PREC_PREFIX,
- Self::DerefStable(p, _) | Self::ReborrowStable(p) | Self::Other(p) => p,
+ fn for_defined_ty<'tcx>(cx: &LateContext<'tcx>, ty: DefinedTy<'tcx>, for_return: bool) -> Self {
+ match ty {
+ DefinedTy::Hir(ty) => Self::for_hir_ty(ty),
+ DefinedTy::Mir(ty) => Self::for_mir_ty(
+ cx.tcx,
+ ty.param_env,
+ cx.tcx.erase_late_bound_regions(ty.value),
+ for_return,
+ ),
}
}
-}
-
-/// Walks up the parent expressions attempting to determine both how stable the auto-deref result
-/// is, and which adjustments will be applied to it. Note this will not consider auto-borrow
-/// locations as those follow different rules.
-#[expect(clippy::too_many_lines)]
-fn walk_parents<'tcx>(
- cx: &LateContext<'tcx>,
- possible_borrowers: &mut Vec<(LocalDefId, PossibleBorrowerMap<'tcx, 'tcx>)>,
- e: &'tcx Expr<'_>,
- msrv: &Msrv,
-) -> (Position, &'tcx [Adjustment<'tcx>]) {
- let mut adjustments = [].as_slice();
- let mut precedence = 0i8;
- let ctxt = e.span.ctxt();
- let position = walk_to_expr_usage(cx, e, &mut |parent, child_id| {
- // LocalTableInContext returns the wrong lifetime, so go use `expr_adjustments` instead.
- if adjustments.is_empty() && let Node::Expr(e) = cx.tcx.hir().get(child_id) {
- adjustments = cx.typeck_results().expr_adjustments(e);
- }
- match parent {
- Node::Local(Local { ty: Some(ty), span, .. }) if span.ctxt() == ctxt => {
- Some(binding_ty_auto_deref_stability(cx, ty, precedence, List::empty()))
- },
- Node::Item(&Item {
- kind: ItemKind::Static(..) | ItemKind::Const(..),
- owner_id,
- span,
- ..
- })
- | Node::TraitItem(&TraitItem {
- kind: TraitItemKind::Const(..),
- owner_id,
- span,
- ..
- })
- | Node::ImplItem(&ImplItem {
- kind: ImplItemKind::Const(..),
- owner_id,
- span,
- ..
- }) if span.ctxt() == ctxt => {
- let ty = cx.tcx.type_of(owner_id.def_id).subst_identity();
- Some(ty_auto_deref_stability(cx.tcx, cx.param_env, ty, precedence).position_for_result(cx))
- },
- Node::Item(&Item {
- kind: ItemKind::Fn(..),
- owner_id,
- span,
- ..
- })
- | Node::TraitItem(&TraitItem {
- kind: TraitItemKind::Fn(..),
- owner_id,
- span,
- ..
- })
- | Node::ImplItem(&ImplItem {
- kind: ImplItemKind::Fn(..),
- owner_id,
- span,
- ..
- }) if span.ctxt() == ctxt => {
- let output = cx
- .tcx
- .erase_late_bound_regions(cx.tcx.fn_sig(owner_id).subst_identity().output());
- Some(ty_auto_deref_stability(cx.tcx, cx.param_env, output, precedence).position_for_result(cx))
- },
-
- Node::ExprField(field) if field.span.ctxt() == ctxt => match get_parent_expr_for_hir(cx, field.hir_id) {
- Some(Expr {
- hir_id,
- kind: ExprKind::Struct(path, ..),
- ..
- }) => adt_and_variant_of_res(cx, cx.qpath_res(path, *hir_id))
- .and_then(|(adt, variant)| {
- variant
- .fields
- .iter()
- .find(|f| f.name == field.ident.name)
- .map(|f| (adt, f))
- })
- .map(|(adt, field_def)| {
- ty_auto_deref_stability(
- cx.tcx,
- // Use the param_env of the target type.
- cx.tcx.param_env(adt.did()),
- cx.tcx.type_of(field_def.did).subst_identity(),
- precedence,
- )
- .position_for_arg()
- }),
- _ => None,
- },
+ // Checks the stability of type coercions when assigned to a binding with the given explicit type.
+ //
+ // e.g.
+ // let x = Box::new(Box::new(0u32));
+ // let y1: &Box<_> = x.deref();
+ // let y2: &Box<_> = &x;
+ //
+ // Here `y1` and `y2` would resolve to different types, so the type `&Box<_>` is not stable when
+ // switching to auto-dereferencing.
+ fn for_hir_ty<'tcx>(ty: &'tcx hir::Ty<'tcx>) -> Self {
+ let TyKind::Ref(_, ty) = &ty.kind else {
+ return Self::None;
+ };
+ let mut ty = ty;
- Node::Expr(parent) if parent.span.ctxt() == ctxt => match parent.kind {
- ExprKind::Ret(_) => {
- let owner_id = cx.tcx.hir().body_owner_def_id(cx.enclosing_body.unwrap());
- Some(
- if let Node::Expr(
- closure_expr @ Expr {
- kind: ExprKind::Closure(closure),
- ..
- },
- ) = cx.tcx.hir().get_by_def_id(owner_id)
- {
- closure_result_position(cx, closure, cx.typeck_results().expr_ty(closure_expr), precedence)
- } else {
- let output = cx
- .tcx
- .erase_late_bound_regions(cx.tcx.fn_sig(owner_id).subst_identity().output());
- ty_auto_deref_stability(cx.tcx, cx.param_env, output, precedence).position_for_result(cx)
- },
- )
- },
- ExprKind::Closure(closure) => Some(closure_result_position(
- cx,
- closure,
- cx.typeck_results().expr_ty(parent),
- precedence,
- )),
- ExprKind::Call(func, _) if func.hir_id == child_id => {
- (child_id == e.hir_id).then_some(Position::Callee)
+ loop {
+ break match ty.ty.kind {
+ TyKind::Ref(_, ref ref_ty) => {
+ ty = ref_ty;
+ continue;
},
- ExprKind::Call(func, args) => args
- .iter()
- .position(|arg| arg.hir_id == child_id)
- .zip(expr_sig(cx, func))
- .and_then(|(i, sig)| {
- sig.input_with_hir(i).map(|(hir_ty, ty)| {
- match hir_ty {
- // Type inference for closures can depend on how they're called. Only go by the explicit
- // types here.
- Some(hir_ty) => {
- binding_ty_auto_deref_stability(cx, hir_ty, precedence, ty.bound_vars())
- },
- None => {
- // `e.hir_id == child_id` for https://github.com/rust-lang/rust-clippy/issues/9739
- // `!call_is_qualified(func)` for https://github.com/rust-lang/rust-clippy/issues/9782
- if e.hir_id == child_id
- && !call_is_qualified(func)
- && let ty::Param(param_ty) = ty.skip_binder().kind()
- {
- needless_borrow_impl_arg_position(
- cx,
- possible_borrowers,
- parent,
- i,
- *param_ty,
- e,
- precedence,
- msrv,
- )
- } else {
- ty_auto_deref_stability(
- cx.tcx,
- // Use the param_env of the target function.
- sig.predicates_id().map_or(ParamEnv::empty(), |id| cx.tcx.param_env(id)),
- cx.tcx.erase_late_bound_regions(ty),
- precedence
- ).position_for_arg()
- }
- },
- }
+ TyKind::Path(
+ QPath::TypeRelative(_, path)
+ | QPath::Resolved(
+ _,
+ Path {
+ segments: [.., path], ..
+ },
+ ),
+ ) => {
+ if let Some(args) = path.args
+ && args.args.iter().any(|arg| match arg {
+ hir::GenericArg::Infer(_) => true,
+ hir::GenericArg::Type(ty) => ty_contains_infer(ty),
+ _ => false,
})
- }),
- ExprKind::MethodCall(method, receiver, args, _) => {
- let fn_id = cx.typeck_results().type_dependent_def_id(parent.hir_id).unwrap();
- if receiver.hir_id == child_id {
- // Check for calls to trait methods where the trait is implemented on a reference.
- // Two cases need to be handled:
- // * `self` methods on `&T` will never have auto-borrow
- // * `&self` methods on `&T` can have auto-borrow, but `&self` methods on `T` will take
- // priority.
- if e.hir_id != child_id {
- return Some(Position::ReborrowStable(precedence))
- } else if let Some(trait_id) = cx.tcx.trait_of_item(fn_id)
- && let arg_ty = cx.tcx.erase_regions(cx.typeck_results().expr_ty_adjusted(e))
- && let ty::Ref(_, sub_ty, _) = *arg_ty.kind()
- && let subs = cx
- .typeck_results()
- .node_substs_opt(parent.hir_id).map(|subs| &subs[1..]).unwrap_or_default()
- && let impl_ty = if cx.tcx.fn_sig(fn_id)
- .subst_identity()
- .skip_binder()
- .inputs()[0].is_ref()
- {
- // Trait methods taking `&self`
- sub_ty
- } else {
- // Trait methods taking `self`
- arg_ty
- } && impl_ty.is_ref()
- && let infcx = cx.tcx.infer_ctxt().build()
- && infcx
- .type_implements_trait(
- trait_id,
- [impl_ty.into()].into_iter().chain(subs.iter().copied()),
- cx.param_env,
- )
- .must_apply_modulo_regions()
- {
- return Some(Position::MethodReceiverRefImpl)
- }
- return Some(Position::MethodReceiver);
+ {
+ Self::Reborrow
+ } else {
+ Self::Deref
}
- args.iter().position(|arg| arg.hir_id == child_id).map(|i| {
- let ty = cx.tcx.fn_sig(fn_id).subst_identity().input(i + 1);
- // `e.hir_id == child_id` for https://github.com/rust-lang/rust-clippy/issues/9739
- // `method.args.is_none()` for https://github.com/rust-lang/rust-clippy/issues/9782
- if e.hir_id == child_id
- && method.args.is_none()
- && let ty::Param(param_ty) = ty.skip_binder().kind()
- {
- needless_borrow_impl_arg_position(
- cx,
- possible_borrowers,
- parent,
- i + 1,
- *param_ty,
- e,
- precedence,
- msrv,
- )
- } else {
- ty_auto_deref_stability(
- cx.tcx,
- // Use the param_env of the target function.
- cx.tcx.param_env(fn_id),
- cx.tcx.erase_late_bound_regions(ty),
- precedence,
- )
- .position_for_arg()
- }
- })
- },
- ExprKind::Field(child, name) if child.hir_id == e.hir_id => Some(Position::FieldAccess {
- name: name.name,
- of_union: is_union(cx.typeck_results(), child),
- }),
- ExprKind::Unary(UnOp::Deref, child) if child.hir_id == e.hir_id => Some(Position::Deref),
- ExprKind::Match(child, _, MatchSource::TryDesugar | MatchSource::AwaitDesugar)
- | ExprKind::Index(child, _)
- if child.hir_id == e.hir_id =>
- {
- Some(Position::Postfix)
},
- _ if child_id == e.hir_id => {
- precedence = parent.precedence().order();
- None
- },
- _ => None,
- },
- _ => None,
+ TyKind::Slice(_)
+ | TyKind::Array(..)
+ | TyKind::Ptr(_)
+ | TyKind::BareFn(_)
+ | TyKind::Never
+ | TyKind::Tup(_)
+ | TyKind::Path(_) => Self::Deref,
+ TyKind::OpaqueDef(..)
+ | TyKind::Infer
+ | TyKind::Typeof(..)
+ | TyKind::TraitObject(..)
+ | TyKind::Err(_) => Self::Reborrow,
+ };
}
- })
- .unwrap_or(Position::Other(precedence));
- (position, adjustments)
-}
-
-fn is_union<'tcx>(typeck: &'tcx TypeckResults<'_>, path_expr: &'tcx Expr<'_>) -> bool {
- typeck
- .expr_ty_adjusted(path_expr)
- .ty_adt_def()
- .map_or(false, rustc_middle::ty::AdtDef::is_union)
-}
-
-fn closure_result_position<'tcx>(
- cx: &LateContext<'tcx>,
- closure: &'tcx Closure<'_>,
- ty: Ty<'tcx>,
- precedence: i8,
-) -> Position {
- match closure.fn_decl.output {
- FnRetTy::Return(hir_ty) => {
- if let Some(sig) = ty_sig(cx, ty)
- && let Some(output) = sig.output()
- {
- binding_ty_auto_deref_stability(cx, hir_ty, precedence, output.bound_vars())
- } else {
- Position::Other(precedence)
- }
- },
- FnRetTy::DefaultReturn(_) => Position::Other(precedence),
}
-}
-// Checks the stability of auto-deref when assigned to a binding with the given explicit type.
-//
-// e.g.
-// let x = Box::new(Box::new(0u32));
-// let y1: &Box<_> = x.deref();
-// let y2: &Box<_> = &x;
-//
-// Here `y1` and `y2` would resolve to different types, so the type `&Box<_>` is not stable when
-// switching to auto-dereferencing.
-fn binding_ty_auto_deref_stability<'tcx>(
- cx: &LateContext<'tcx>,
- ty: &'tcx hir::Ty<'_>,
- precedence: i8,
- binder_args: &'tcx List<BoundVariableKind>,
-) -> Position {
- let TyKind::Ref(_, ty) = &ty.kind else {
- return Position::Other(precedence);
- };
- let mut ty = ty;
+ fn for_mir_ty<'tcx>(tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, ty: Ty<'tcx>, for_return: bool) -> Self {
+ let ty::Ref(_, mut ty, _) = *ty.kind() else {
+ return Self::None;
+ };
- loop {
- break match ty.ty.kind {
- TyKind::Ref(_, ref ref_ty) => {
- ty = ref_ty;
- continue;
- },
- TyKind::Path(
- QPath::TypeRelative(_, path)
- | QPath::Resolved(
- _,
- Path {
- segments: [.., path], ..
- },
- ),
- ) => {
- if let Some(args) = path.args
- && args.args.iter().any(|arg| match arg {
- GenericArg::Infer(_) => true,
- GenericArg::Type(ty) => ty_contains_infer(ty),
- _ => false,
- })
+ ty = tcx.try_normalize_erasing_regions(param_env, ty).unwrap_or(ty);
+ loop {
+ break match *ty.kind() {
+ ty::Ref(_, ref_ty, _) => {
+ ty = ref_ty;
+ continue;
+ },
+ ty::Param(_) if for_return => Self::Deref,
+ ty::Alias(ty::Weak | ty::Inherent, _) => unreachable!("should have been normalized away above"),
+ ty::Alias(ty::Projection, _) if !for_return && ty.has_non_region_param() => Self::Reborrow,
+ ty::Infer(_)
+ | ty::Error(_)
+ | ty::Bound(..)
+ | ty::Alias(ty::Opaque, ..)
+ | ty::Placeholder(_)
+ | ty::Dynamic(..)
+ | ty::Param(_) => Self::Reborrow,
+ ty::Adt(_, args)
+ if ty.has_placeholders()
+ || ty.has_opaque_types()
+ || (!for_return && args.has_non_region_param()) =>
{
- Position::ReborrowStable(precedence)
- } else {
- Position::DerefStable(
- precedence,
- cx.tcx
- .erase_late_bound_regions(Binder::bind_with_vars(
- cx.typeck_results().node_type(ty.ty.hir_id),
- binder_args,
- ))
- .is_sized(cx.tcx, cx.param_env.without_caller_bounds()),
- )
- }
- },
- TyKind::Slice(_) => Position::DerefStable(precedence, false),
- TyKind::Array(..) | TyKind::Ptr(_) | TyKind::BareFn(_) => Position::DerefStable(precedence, true),
- TyKind::Never
- | TyKind::Tup(_)
- | TyKind::Path(_) => Position::DerefStable(
- precedence,
- cx.tcx
- .erase_late_bound_regions(Binder::bind_with_vars(
- cx.typeck_results().node_type(ty.ty.hir_id),
- binder_args,
- ))
- .is_sized(cx.tcx, cx.param_env.without_caller_bounds()),
- ),
- TyKind::OpaqueDef(..) | TyKind::Infer | TyKind::Typeof(..) | TyKind::TraitObject(..) | TyKind::Err(_) => {
- Position::ReborrowStable(precedence)
- },
- };
+ Self::Reborrow
+ },
+ ty::Bool
+ | ty::Char
+ | ty::Int(_)
+ | ty::Uint(_)
+ | ty::Array(..)
+ | ty::Float(_)
+ | ty::RawPtr(..)
+ | ty::FnPtr(_)
+ | ty::Str
+ | ty::Slice(..)
+ | ty::Adt(..)
+ | ty::Foreign(_)
+ | ty::FnDef(..)
+ | ty::Generator(..)
+ | ty::GeneratorWitness(..)
+ | ty::GeneratorWitnessMIR(..)
+ | ty::Closure(..)
+ | ty::Never
+ | ty::Tuple(_)
+ | ty::Alias(ty::Projection, _) => Self::Deref,
+ };
+ }
}
}
@@ -1078,10 +968,10 @@ fn ty_contains_infer(ty: &hir::Ty<'_>) -> bool {
}
}
- fn visit_generic_arg(&mut self, arg: &GenericArg<'_>) {
- if self.0 || matches!(arg, GenericArg::Infer(_)) {
+ fn visit_generic_arg(&mut self, arg: &hir::GenericArg<'_>) {
+ if self.0 || matches!(arg, hir::GenericArg::Infer(_)) {
self.0 = true;
- } else if let GenericArg::Type(ty) = arg {
+ } else if let hir::GenericArg::Type(ty) = arg {
self.visit_ty(ty);
}
}
@@ -1091,49 +981,29 @@ fn ty_contains_infer(ty: &hir::Ty<'_>) -> bool {
v.0
}
-fn call_is_qualified(expr: &Expr<'_>) -> bool {
- if let ExprKind::Path(path) = &expr.kind {
- match path {
- QPath::Resolved(_, path) => path.segments.last().map_or(false, |segment| segment.args.is_some()),
- QPath::TypeRelative(_, segment) => segment.args.is_some(),
- QPath::LangItem(..) => false,
- }
- } else {
- false
- }
-}
-
-// Checks whether:
-// * child is an expression of the form `&e` in an argument position requiring an `impl Trait`
-// * `e`'s type implements `Trait` and is copyable
-// If the conditions are met, returns `Some(Position::ImplArg(..))`; otherwise, returns `None`.
-// The "is copyable" condition is to avoid the case where removing the `&` means `e` would have to
-// be moved, but it cannot be.
-#[expect(clippy::too_many_arguments, clippy::too_many_lines)]
-fn needless_borrow_impl_arg_position<'tcx>(
+/// Checks for the number of borrow expressions which can be removed from the given expression
+/// where the expression is used as an argument to a function expecting a generic type.
+///
+/// The following constraints will be checked:
+/// * The borrowed expression meets all the generic type's constraints.
+/// * The generic type appears only once in the function's signature.
+/// * The borrowed value will not be moved if it is used later in the function.
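
A hedged example of the situation this function counts; the function and call below are illustrative only. Each removable `&` still satisfies the generic bound, and the innermost borrow is kept because the value is used afterwards.

```rust
fn takes_as_ref<T: AsRef<str>>(value: T) -> usize {
    value.as_ref().len()
}

fn main() {
    let s = String::from("hi");
    // `&s`, `&&s` and `&&&s` all satisfy `T: AsRef<str>`, so the two outer
    // borrows are removable; `s` must stay borrowed because it is used again.
    let _ = takes_as_ref(&&&s);
    println!("{s}");
}
```
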
+#[expect(clippy::too_many_arguments)]
+fn needless_borrow_generic_arg_count<'tcx>(
cx: &LateContext<'tcx>,
possible_borrowers: &mut Vec<(LocalDefId, PossibleBorrowerMap<'tcx, 'tcx>)>,
- parent: &Expr<'tcx>,
+ fn_id: DefId,
+ callee_args: &'tcx List<GenericArg<'tcx>>,
arg_index: usize,
param_ty: ParamTy,
mut expr: &Expr<'tcx>,
- precedence: i8,
msrv: &Msrv,
-) -> Position {
+) -> usize {
let destruct_trait_def_id = cx.tcx.lang_items().destruct_trait();
let sized_trait_def_id = cx.tcx.lang_items().sized_trait();
- let Some(callee_def_id) = fn_def_id(cx, parent) else { return Position::Other(precedence) };
- let fn_sig = cx.tcx.fn_sig(callee_def_id).subst_identity().skip_binder();
- let substs_with_expr_ty = cx
- .typeck_results()
- .node_substs(if let ExprKind::Call(callee, _) = parent.kind {
- callee.hir_id
- } else {
- parent.hir_id
- });
-
- let predicates = cx.tcx.param_env(callee_def_id).caller_bounds();
+ let fn_sig = cx.tcx.fn_sig(fn_id).instantiate_identity().skip_binder();
+ let predicates = cx.tcx.param_env(fn_id).caller_bounds();
let projection_predicates = predicates
.iter()
.filter_map(|predicate| {
@@ -1168,7 +1038,7 @@ fn needless_borrow_impl_arg_position<'tcx>(
|| cx.tcx.is_diagnostic_item(sym::Any, trait_def_id)
})
{
- return Position::Other(precedence);
+ return 0;
}
// See:
@@ -1176,14 +1046,14 @@ fn needless_borrow_impl_arg_position<'tcx>(
// - https://github.com/rust-lang/rust-clippy/pull/9674#issuecomment-1292225232
if projection_predicates
.iter()
- .any(|projection_predicate| is_mixed_projection_predicate(cx, callee_def_id, projection_predicate))
+ .any(|projection_predicate| is_mixed_projection_predicate(cx, fn_id, projection_predicate))
{
- return Position::Other(precedence);
+ return 0;
}
- // `substs_with_referent_ty` can be constructed outside of `check_referent` because the same
+ // `args_with_referent_ty` can be constructed outside of `check_referent` because the same
// elements are modified each time `check_referent` is called.
- let mut substs_with_referent_ty = substs_with_expr_ty.to_vec();
+ let mut args_with_referent_ty = callee_args.to_vec();
let mut check_reference_and_referent = |reference, referent| {
let referent_ty = cx.typeck_results().expr_ty(referent);
@@ -1207,7 +1077,7 @@ fn needless_borrow_impl_arg_position<'tcx>(
fn_sig,
arg_index,
&projection_predicates,
- &mut substs_with_referent_ty,
+ &mut args_with_referent_ty,
) {
return false;
}
@@ -1216,34 +1086,29 @@ fn needless_borrow_impl_arg_position<'tcx>(
if let ClauseKind::Trait(trait_predicate) = predicate.kind().skip_binder()
&& cx.tcx.is_diagnostic_item(sym::IntoIterator, trait_predicate.trait_ref.def_id)
&& let ty::Param(param_ty) = trait_predicate.self_ty().kind()
- && let GenericArgKind::Type(ty) = substs_with_referent_ty[param_ty.index as usize].unpack()
+ && let GenericArgKind::Type(ty) = args_with_referent_ty[param_ty.index as usize].unpack()
&& ty.is_array()
&& !msrv.meets(msrvs::ARRAY_INTO_ITERATOR)
{
return false;
}
- let predicate = EarlyBinder::bind(predicate).subst(cx.tcx, &substs_with_referent_ty);
+ let predicate = EarlyBinder::bind(predicate).instantiate(cx.tcx, &args_with_referent_ty);
let obligation = Obligation::new(cx.tcx, ObligationCause::dummy(), cx.param_env, predicate);
let infcx = cx.tcx.infer_ctxt().build();
infcx.predicate_must_hold_modulo_regions(&obligation)
})
};
- let mut needless_borrow = false;
+ let mut count = 0;
while let ExprKind::AddrOf(_, _, referent) = expr.kind {
if !check_reference_and_referent(expr, referent) {
break;
}
expr = referent;
- needless_borrow = true;
- }
-
- if needless_borrow {
- Position::ImplArg(expr.hir_id)
- } else {
- Position::Other(precedence)
+ count += 1;
}
+ count
}
fn has_ref_mut_self_method(cx: &LateContext<'_>, trait_def_id: DefId) -> bool {
@@ -1252,7 +1117,12 @@ fn has_ref_mut_self_method(cx: &LateContext<'_>, trait_def_id: DefId) -> bool {
.in_definition_order()
.any(|assoc_item| {
if assoc_item.fn_has_self_parameter {
- let self_ty = cx.tcx.fn_sig(assoc_item.def_id).subst_identity().skip_binder().inputs()[0];
+ let self_ty = cx
+ .tcx
+ .fn_sig(assoc_item.def_id)
+ .instantiate_identity()
+ .skip_binder()
+ .inputs()[0];
matches!(self_ty.kind(), ty::Ref(_, _, Mutability::Mut))
} else {
false
@@ -1301,7 +1171,7 @@ fn referent_used_exactly_once<'tcx>(
&& let [location] = *local_assignments(mir, local).as_slice()
&& let Some(statement) = mir.basic_blocks[location.block].statements.get(location.statement_index)
&& let StatementKind::Assign(box (_, Rvalue::Ref(_, _, place))) = statement.kind
- && !place.has_deref()
+ && !place.is_indirect_first_projection()
// Ensure not in a loop (https://github.com/rust-lang/rust-clippy/issues/9710)
&& TriColorDepthFirstSearch::new(&mir.basic_blocks).run_from(location.block, &mut CycleDetector).is_none()
{
@@ -1323,7 +1193,7 @@ fn referent_used_exactly_once<'tcx>(
}
}
-// Iteratively replaces `param_ty` with `new_ty` in `substs`, and similarly for each resulting
+// Iteratively replaces `param_ty` with `new_ty` in `args`, and similarly for each resulting
// projected type that is a type parameter. Returns `false` if replacing the types would have an
// effect on the function signature beyond substituting `new_ty` for `param_ty`.
// See: https://github.com/rust-lang/rust-clippy/pull/9136#discussion_r927212757
@@ -1334,11 +1204,11 @@ fn replace_types<'tcx>(
fn_sig: FnSig<'tcx>,
arg_index: usize,
projection_predicates: &[ProjectionPredicate<'tcx>],
- substs: &mut [ty::GenericArg<'tcx>],
+ args: &mut [ty::GenericArg<'tcx>],
) -> bool {
- let mut replaced = BitSet::new_empty(substs.len());
+ let mut replaced = BitSet::new_empty(args.len());
- let mut deque = VecDeque::with_capacity(substs.len());
+ let mut deque = VecDeque::with_capacity(args.len());
deque.push_back((param_ty, new_ty));
while let Some((param_ty, new_ty)) = deque.pop_front() {
@@ -1352,7 +1222,7 @@ fn replace_types<'tcx>(
return false;
}
- substs[param_ty.index as usize] = ty::GenericArg::from(new_ty);
+ args[param_ty.index as usize] = ty::GenericArg::from(new_ty);
// The `replaced.insert(...)` check provides some protection against infinite loops.
if replaced.insert(param_ty.index) {
@@ -1367,7 +1237,7 @@ fn replace_types<'tcx>(
));
if let Ok(projected_ty) = cx.tcx.try_normalize_erasing_regions(cx.param_env, projection)
- && substs[term_param_ty.index as usize] != ty::GenericArg::from(projected_ty)
+ && args[term_param_ty.index as usize] != ty::GenericArg::from(projected_ty)
{
deque.push_back((*term_param_ty, projected_ty));
}
@@ -1379,95 +1249,6 @@ fn replace_types<'tcx>(
true
}
-struct TyPosition<'tcx> {
- position: Position,
- ty: Option<Ty<'tcx>>,
-}
-impl From<Position> for TyPosition<'_> {
- fn from(position: Position) -> Self {
- Self { position, ty: None }
- }
-}
-impl<'tcx> TyPosition<'tcx> {
- fn new_deref_stable_for_result(precedence: i8, ty: Ty<'tcx>) -> Self {
- Self {
- position: Position::ReborrowStable(precedence),
- ty: Some(ty),
- }
- }
- fn position_for_result(self, cx: &LateContext<'tcx>) -> Position {
- match (self.position, self.ty) {
- (Position::ReborrowStable(precedence), Some(ty)) => {
- Position::DerefStable(precedence, ty.is_sized(cx.tcx, cx.param_env))
- },
- (position, _) => position,
- }
- }
- fn position_for_arg(self) -> Position {
- self.position
- }
-}
-
-// Checks whether a type is stable when switching to auto dereferencing,
-fn ty_auto_deref_stability<'tcx>(
- tcx: TyCtxt<'tcx>,
- param_env: ParamEnv<'tcx>,
- ty: Ty<'tcx>,
- precedence: i8,
-) -> TyPosition<'tcx> {
- let ty::Ref(_, mut ty, _) = *ty.kind() else {
- return Position::Other(precedence).into();
- };
-
- ty = tcx.try_normalize_erasing_regions(param_env, ty).unwrap_or(ty);
-
- loop {
- break match *ty.kind() {
- ty::Ref(_, ref_ty, _) => {
- ty = ref_ty;
- continue;
- },
- ty::Param(_) => TyPosition::new_deref_stable_for_result(precedence, ty),
- ty::Alias(ty::Weak, _) => unreachable!("should have been normalized away above"),
- ty::Alias(ty::Inherent, _) => unreachable!("inherent projection should have been normalized away above"),
- ty::Alias(ty::Projection, _) if ty.has_non_region_param() => {
- TyPosition::new_deref_stable_for_result(precedence, ty)
- },
- ty::Infer(_)
- | ty::Error(_)
- | ty::Bound(..)
- | ty::Alias(ty::Opaque, ..)
- | ty::Placeholder(_)
- | ty::Dynamic(..) => Position::ReborrowStable(precedence).into(),
- ty::Adt(..) if ty.has_placeholders() || ty.has_opaque_types() => {
- Position::ReborrowStable(precedence).into()
- },
- ty::Adt(_, substs) if substs.has_non_region_param() => {
- TyPosition::new_deref_stable_for_result(precedence, ty)
- },
- ty::Bool
- | ty::Char
- | ty::Int(_)
- | ty::Uint(_)
- | ty::Array(..)
- | ty::Float(_)
- | ty::RawPtr(..)
- | ty::FnPtr(_) => Position::DerefStable(precedence, true).into(),
- ty::Str | ty::Slice(..) => Position::DerefStable(precedence, false).into(),
- ty::Adt(..)
- | ty::Foreign(_)
- | ty::FnDef(..)
- | ty::Generator(..)
- | ty::GeneratorWitness(..)
- | ty::GeneratorWitnessMIR(..)
- | ty::Closure(..)
- | ty::Never
- | ty::Tuple(_)
- | ty::Alias(ty::Projection, _) => Position::DerefStable(precedence, ty.is_sized(tcx, param_env)).into(),
- };
- }
-}
-
fn ty_contains_field(ty: Ty<'_>, name: Symbol) -> bool {
if let ty::Adt(adt, _) = *ty.kind() {
adt.is_struct() && adt.all_fields().any(|f| f.name == name)
@@ -1477,12 +1258,12 @@ fn ty_contains_field(ty: Ty<'_>, name: Symbol) -> bool {
}
#[expect(clippy::needless_pass_by_value, clippy::too_many_lines)]
-fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data: StateData) {
+fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data: StateData<'tcx>) {
match state {
State::DerefMethod {
ty_changed_count,
- is_final_ufcs,
- target_mut,
+ is_ufcs,
+ mutbl,
} => {
let mut app = Applicability::MachineApplicable;
let (expr_str, _expr_is_macro_call) = snippet_with_context(cx, expr.span, data.span.ctxt(), "..", &mut app);
@@ -1497,12 +1278,12 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data
};
let addr_of_str = if ty_changed_count < ref_count {
// Check if a reborrow from &mut T -> &T is required.
- if target_mut == Mutability::Not && matches!(ty.kind(), ty::Ref(_, _, Mutability::Mut)) {
+ if mutbl == Mutability::Not && matches!(ty.kind(), ty::Ref(_, _, Mutability::Mut)) {
"&*"
} else {
""
}
- } else if target_mut == Mutability::Mut {
+ } else if mutbl == Mutability::Mut {
"&mut "
} else {
"&"
@@ -1519,7 +1300,7 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data
*/
// Fix #10850, do not lint if it's `Foo::deref` instead of `foo.deref()`.
- if is_final_ufcs {
+ if is_ufcs {
return;
}
@@ -1527,24 +1308,30 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data
cx,
EXPLICIT_DEREF_METHODS,
data.span,
- match target_mut {
+ match mutbl {
Mutability::Not => "explicit `deref` method call",
Mutability::Mut => "explicit `deref_mut` method call",
},
- "try this",
+ "try",
format!("{addr_of_str}{deref_str}{expr_str}"),
app,
);
},
State::DerefedBorrow(state) => {
let mut app = Applicability::MachineApplicable;
- let snip_expr = state.snip_expr.map_or(expr, |hir_id| cx.tcx.hir().expect_expr(hir_id));
- let (snip, snip_is_macro) = snippet_with_context(cx, snip_expr.span, data.span.ctxt(), "..", &mut app);
+ let (snip, snip_is_macro) = snippet_with_context(cx, expr.span, data.span.ctxt(), "..", &mut app);
span_lint_hir_and_then(cx, NEEDLESS_BORROW, data.hir_id, data.span, state.msg, |diag| {
- let calls_field = matches!(expr.kind, ExprKind::Field(..)) && matches!(data.position, Position::Callee);
+ let (precedence, calls_field) = match get_parent_node(cx.tcx, data.hir_id) {
+ Some(Node::Expr(e)) => match e.kind {
+ ExprKind::Call(callee, _) if callee.hir_id != data.hir_id => (0, false),
+ ExprKind::Call(..) => (PREC_POSTFIX, matches!(expr.kind, ExprKind::Field(..))),
+ _ => (e.precedence().order(), false),
+ },
+ _ => (0, false),
+ };
let sugg = if !snip_is_macro
+ && (calls_field || expr.precedence().order() < precedence)
&& !has_enclosing_paren(&snip)
- && (expr.precedence().order() < data.position.precedence() || calls_field)
{
format!("({snip})")
} else {
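
A rough illustration of the `calls_field` case handled above: when the borrowed expression is a field access used as a callee, the suggestion must keep parentheses around it. The struct below is made up for illustration.

```rust
struct Handlers {
    on_tick: fn() -> u32,
}

fn main() {
    let h = Handlers { on_tick: || 42 };
    // Removing the needless `&` must keep the parentheses: `(h.on_tick)()`
    // calls the field, while `h.on_tick()` would be a method call on `Handlers`.
    assert_eq!((&h.on_tick)(), 42);
    assert_eq!((h.on_tick)(), 42);
}
```
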
@@ -1561,7 +1348,8 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data
| ExprKind::If(..)
| ExprKind::Loop(..)
| ExprKind::Match(..)
- ) && matches!(data.position, Position::DerefStable(_, true))
+ ) && let ty::Ref(_, ty, _) = data.adjusted_ty.kind()
+ && ty.is_sized(cx.tcx, cx.param_env)
{
// Rustc bug: auto deref doesn't work on block expression when targeting sized types.
return;
@@ -1574,9 +1362,9 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data
Mutability::Not => "&",
Mutability::Mut => "&mut ",
};
- (prefix, 0)
+ (prefix, PREC_PREFIX)
} else {
- ("", data.position.precedence())
+ ("", 0)
};
span_lint_hir_and_then(
cx,
@@ -1593,7 +1381,7 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data
} else {
format!("{prefix}{snip}")
};
- diag.span_suggestion(data.span, "try this", sugg, app);
+ diag.span_suggestion(data.span, "try", sugg, app);
},
);
},
@@ -1605,7 +1393,7 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data
| ExprKind::If(..)
| ExprKind::Loop(..)
| ExprKind::Match(..)
- ) && matches!(data.position, Position::DerefStable(_, true))
+ ) && data.adjusted_ty.is_sized(cx.tcx, cx.param_env)
{
// Rustc bug: auto deref doesn't work on block expression when targeting sized types.
return;
@@ -1620,7 +1408,7 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data
|diag| {
let mut app = Applicability::MachineApplicable;
let snip = snippet_with_context(cx, expr.span, data.span.ctxt(), "..", &mut app).0;
- diag.span_suggestion(data.span, "try this", snip.into_owned(), app);
+ diag.span_suggestion(data.span, "try", snip.into_owned(), app);
},
);
},
diff --git a/src/tools/clippy/clippy_lints/src/derivable_impls.rs b/src/tools/clippy/clippy_lints/src/derivable_impls.rs
index 020ffe7f8..9a85cc4ce 100644
--- a/src/tools/clippy/clippy_lints/src/derivable_impls.rs
+++ b/src/tools/clippy/clippy_lints/src/derivable_impls.rs
@@ -3,14 +3,13 @@ use clippy_utils::msrvs::{self, Msrv};
use clippy_utils::source::indent_of;
use clippy_utils::{is_default_equivalent, peel_blocks};
use rustc_errors::Applicability;
+use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
use rustc_hir::{
- self as hir,
- def::{CtorKind, CtorOf, DefKind, Res},
- Body, Expr, ExprKind, GenericArg, Impl, ImplItemKind, Item, ItemKind, Node, PathSegment, QPath, TyKind,
+ self as hir, Body, Expr, ExprKind, GenericArg, Impl, ImplItemKind, Item, ItemKind, Node, PathSegment, QPath, TyKind,
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::adjustment::{Adjust, PointerCoercion};
-use rustc_middle::ty::{self, Adt, AdtDef, SubstsRef, Ty, TypeckResults};
+use rustc_middle::ty::{self, Adt, AdtDef, GenericArgsRef, Ty, TypeckResults};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::sym;
@@ -80,7 +79,7 @@ fn is_path_self(e: &Expr<'_>) -> bool {
fn contains_trait_object(ty: Ty<'_>) -> bool {
match ty.kind() {
ty::Ref(_, ty, _) => contains_trait_object(*ty),
- ty::Adt(def, substs) => def.is_box() && substs[0].as_type().map_or(false, contains_trait_object),
+ ty::Adt(def, args) => def.is_box() && args[0].as_type().map_or(false, contains_trait_object),
ty::Dynamic(..) => true,
_ => false,
}
@@ -92,18 +91,19 @@ fn check_struct<'tcx>(
self_ty: &hir::Ty<'_>,
func_expr: &Expr<'_>,
adt_def: AdtDef<'_>,
- substs: SubstsRef<'_>,
+ ty_args: GenericArgsRef<'_>,
typeck_results: &'tcx TypeckResults<'tcx>,
) {
if let TyKind::Path(QPath::Resolved(_, p)) = self_ty.kind {
if let Some(PathSegment { args, .. }) = p.segments.last() {
let args = args.map(|a| a.args).unwrap_or(&[]);
- // substs contains the generic parameters of the type declaration, while args contains the arguments
- // used at instantiation time. If both len are not equal, it means that some parameters were not
- // provided (which means that the default values were used); in this case we will not risk
- // suggesting too broad a rewrite. We won't either if any argument is a type or a const.
- if substs.len() != args.len() || args.iter().any(|arg| !matches!(arg, GenericArg::Lifetime(_))) {
+ // ty_args contains the generic parameters of the type declaration, while args contains the
+            // arguments used at instantiation time. If their lengths are not equal, it means that some
+ // parameters were not provided (which means that the default values were used); in this
+ // case we will not risk suggesting too broad a rewrite. We won't either if any argument
+ // is a type or a const.
+ if ty_args.len() != args.len() || args.iter().any(|arg| !matches!(arg, GenericArg::Lifetime(_))) {
return;
}
}
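
A hedged sketch of the bail-out case described in the comment: the `impl` names the type without arguments and so relies on a defaulted type parameter, meaning the declared generics and the supplied arguments differ in length and no derive suggestion is made. `Wrapper` is a made-up type.

```rust
struct Wrapper<T = u32> {
    value: Option<T>,
}

// One declared generic parameter, zero arguments supplied here, so the
// lengths differ and the lint stays silent instead of suggesting a derive.
impl Default for Wrapper {
    fn default() -> Self {
        Wrapper { value: None }
    }
}

fn main() {
    let w: Wrapper = Wrapper::default();
    assert!(w.value.is_none());
}
```
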
@@ -214,7 +214,7 @@ impl<'tcx> LateLintPass<'tcx> for DerivableImpls {
if let Some(Node::ImplItem(impl_item)) = cx.tcx.hir().find(impl_item_hir);
if let ImplItemKind::Fn(_, b) = &impl_item.kind;
if let Body { value: func_expr, .. } = cx.tcx.hir().body(*b);
- if let &Adt(adt_def, substs) = cx.tcx.type_of(item.owner_id).subst_identity().kind();
+ if let &Adt(adt_def, args) = cx.tcx.type_of(item.owner_id).instantiate_identity().kind();
if let attrs = cx.tcx.hir().attrs(item.hir_id());
if !attrs.iter().any(|attr| attr.doc_str().is_some());
if let child_attrs = cx.tcx.hir().attrs(impl_item_hir);
@@ -222,7 +222,7 @@ impl<'tcx> LateLintPass<'tcx> for DerivableImpls {
then {
if adt_def.is_struct() {
- check_struct(cx, item, self_ty, func_expr, adt_def, substs, cx.tcx.typeck_body(*b));
+ check_struct(cx, item, self_ty, func_expr, adt_def, args, cx.tcx.typeck_body(*b));
} else if adt_def.is_enum() && self.msrv.meets(msrvs::DEFAULT_ENUM_ATTRIBUTE) {
check_enum(cx, item, func_expr, adt_def);
}
diff --git a/src/tools/clippy/clippy_lints/src/derive.rs b/src/tools/clippy/clippy_lints/src/derive.rs
index a005a360e..d3311792c 100644
--- a/src/tools/clippy/clippy_lints/src/derive.rs
+++ b/src/tools/clippy/clippy_lints/src/derive.rs
@@ -1,21 +1,19 @@
use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_note, span_lint_and_sugg, span_lint_and_then};
-use clippy_utils::paths;
use clippy_utils::ty::{implements_trait, implements_trait_with_env, is_copy};
-use clippy_utils::{is_lint_allowed, match_def_path};
+use clippy_utils::{is_lint_allowed, match_def_path, paths};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::{walk_expr, walk_fn, walk_item, FnKind, Visitor};
use rustc_hir::{
- self as hir, BlockCheckMode, BodyId, Constness, Expr, ExprKind, FnDecl, Impl, Item, ItemKind, UnsafeSource,
- Unsafety,
+ self as hir, BlockCheckMode, BodyId, Expr, ExprKind, FnDecl, Impl, Item, ItemKind, UnsafeSource, Unsafety,
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter;
use rustc_middle::traits::Reveal;
use rustc_middle::ty::{
- self, BoundConstness, ClauseKind, GenericArgKind, GenericParamDefKind, ImplPolarity, ParamEnv, ToPredicate,
- TraitPredicate, Ty, TyCtxt,
+ self, ClauseKind, GenericArgKind, GenericParamDefKind, ImplPolarity, ParamEnv, ToPredicate, TraitPredicate, Ty,
+ TyCtxt,
};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::def_id::LocalDefId;
@@ -211,7 +209,7 @@ impl<'tcx> LateLintPass<'tcx> for Derive {
..
}) = item.kind
{
- let ty = cx.tcx.type_of(item.owner_id).subst_identity();
+ let ty = cx.tcx.type_of(item.owner_id).instantiate_identity();
let is_automatically_derived = cx.tcx.has_attr(item.owner_id, sym::automatically_derived);
check_hash_peq(cx, item.span, trait_ref, ty, is_automatically_derived);
@@ -252,7 +250,7 @@ fn check_hash_peq<'tcx>(
// Only care about `impl PartialEq<Foo> for Foo`
    // For `impl PartialEq<B> for A`, input_types is [A, B]
- if trait_ref.subst_identity().substs.type_at(1) == ty {
+ if trait_ref.instantiate_identity().args.type_at(1) == ty {
span_lint_and_then(
cx,
DERIVED_HASH_WITH_MANUAL_EQ,
@@ -300,7 +298,7 @@ fn check_ord_partial_ord<'tcx>(
// Only care about `impl PartialOrd<Foo> for Foo`
    // For `impl PartialOrd<B> for A`, input_types is [A, B]
- if trait_ref.subst_identity().substs.type_at(1) == ty {
+ if trait_ref.instantiate_identity().args.type_at(1) == ty {
let mess = if partial_ord_is_automatically_derived {
"you are implementing `Ord` explicitly but have derived `PartialOrd`"
} else {
@@ -334,7 +332,9 @@ fn check_copy_clone<'tcx>(cx: &LateContext<'tcx>, item: &Item<'_>, trait_ref: &h
Some(id) if trait_ref.trait_def_id() == Some(id) => id,
_ => return,
};
- let Some(copy_id) = cx.tcx.lang_items().copy_trait() else { return };
+ let Some(copy_id) = cx.tcx.lang_items().copy_trait() else {
+ return;
+ };
let (ty_adt, ty_subs) = match *ty.kind() {
// Unions can't derive clone.
ty::Adt(adt, subs) if !adt.is_union() => (adt, subs),
@@ -345,9 +345,10 @@ fn check_copy_clone<'tcx>(cx: &LateContext<'tcx>, item: &Item<'_>, trait_ref: &h
if !is_copy(cx, ty) {
if ty_subs.non_erasable_generics().next().is_some() {
let has_copy_impl = cx.tcx.all_local_trait_impls(()).get(&copy_id).map_or(false, |impls| {
- impls
- .iter()
- .any(|&id| matches!(cx.tcx.type_of(id).subst_identity().kind(), ty::Adt(adt, _) if ty_adt.did() == adt.did()))
+ impls.iter().any(|&id| {
+ matches!(cx.tcx.type_of(id).instantiate_identity().kind(), ty::Adt(adt, _)
+ if ty_adt.did() == adt.did())
+ })
});
if !has_copy_impl {
return;
@@ -464,18 +465,18 @@ impl<'tcx> Visitor<'tcx> for UnsafeVisitor<'_, 'tcx> {
/// Implementation of the `DERIVE_PARTIAL_EQ_WITHOUT_EQ` lint.
fn check_partial_eq_without_eq<'tcx>(cx: &LateContext<'tcx>, span: Span, trait_ref: &hir::TraitRef<'_>, ty: Ty<'tcx>) {
if_chain! {
- if let ty::Adt(adt, substs) = ty.kind();
+ if let ty::Adt(adt, args) = ty.kind();
if cx.tcx.visibility(adt.did()).is_public();
if let Some(eq_trait_def_id) = cx.tcx.get_diagnostic_item(sym::Eq);
if let Some(def_id) = trait_ref.trait_def_id();
if cx.tcx.is_diagnostic_item(sym::PartialEq, def_id);
let param_env = param_env_for_derived_eq(cx.tcx, adt.did(), eq_trait_def_id);
- if !implements_trait_with_env(cx.tcx, param_env, ty, eq_trait_def_id, []);
+ if !implements_trait_with_env(cx.tcx, param_env, ty, eq_trait_def_id, &[]);
// If all of our fields implement `Eq`, we can implement `Eq` too
if adt
.all_fields()
- .map(|f| f.ty(cx.tcx, substs))
- .all(|ty| implements_trait_with_env(cx.tcx, param_env, ty, eq_trait_def_id, []));
+ .map(|f| f.ty(cx.tcx, args))
+ .all(|ty| implements_trait_with_env(cx.tcx, param_env, ty, eq_trait_def_id, &[]));
then {
span_lint_and_sugg(
cx,
@@ -506,7 +507,6 @@ fn param_env_for_derived_eq(tcx: TyCtxt<'_>, did: DefId, eq_trait_id: DefId) ->
if let ClauseKind::Trait(p) = p.kind().skip_binder()
&& p.trait_ref.def_id == eq_trait_id
&& let ty::Param(self_ty) = p.trait_ref.self_ty().kind()
- && p.constness == BoundConstness::NotConst
{
// Flag types which already have an `Eq` bound.
params[self_ty.index as usize].1 = false;
@@ -518,13 +518,11 @@ fn param_env_for_derived_eq(tcx: TyCtxt<'_>, did: DefId, eq_trait_id: DefId) ->
params.iter().filter(|&&(_, needs_eq)| needs_eq).map(|&(param, _)| {
ClauseKind::Trait(TraitPredicate {
trait_ref: ty::TraitRef::new(tcx, eq_trait_id, [tcx.mk_param_from_def(param)]),
- constness: BoundConstness::NotConst,
polarity: ImplPolarity::Positive,
})
.to_predicate(tcx)
}),
)),
Reveal::UserFacing,
- Constness::NotConst,
)
}
diff --git a/src/tools/clippy/clippy_lints/src/disallowed_methods.rs b/src/tools/clippy/clippy_lints/src/disallowed_methods.rs
index ca8671c8f..95d3f7547 100644
--- a/src/tools/clippy/clippy_lints/src/disallowed_methods.rs
+++ b/src/tools/clippy/clippy_lints/src/disallowed_methods.rs
@@ -94,7 +94,7 @@ impl<'tcx> LateLintPass<'tcx> for DisallowedMethods {
path_def_id(cx, expr)
};
let Some(def_id) = uncalled_path.or_else(|| fn_def_id(cx, expr)) else {
- return
+ return;
};
let conf = match self.disallowed.get(&def_id) {
Some(&index) => &self.conf_disallowed[index],
diff --git a/src/tools/clippy/clippy_lints/src/disallowed_names.rs b/src/tools/clippy/clippy_lints/src/disallowed_names.rs
index 6e6615f08..04c2d4413 100644
--- a/src/tools/clippy/clippy_lints/src/disallowed_names.rs
+++ b/src/tools/clippy/clippy_lints/src/disallowed_names.rs
@@ -1,4 +1,5 @@
-use clippy_utils::{diagnostics::span_lint, is_test_module_or_function};
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::is_test_module_or_function;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::{Item, Pat, PatKind};
use rustc_lint::{LateContext, LateLintPass};
diff --git a/src/tools/clippy/clippy_lints/src/doc.rs b/src/tools/clippy/clippy_lints/src/doc.rs
index 87d88f707..e29ab634c 100644
--- a/src/tools/clippy/clippy_lints/src/doc.rs
+++ b/src/tools/clippy/clippy_lints/src/doc.rs
@@ -16,7 +16,7 @@ use rustc_ast::token::CommentKind;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::sync::Lrc;
use rustc_errors::emitter::EmitterWriter;
-use rustc_errors::{Applicability, Handler, SuggestionStyle, TerminalUrl};
+use rustc_errors::{Applicability, Handler, SuggestionStyle};
use rustc_hir as hir;
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{AnonConst, Expr};
@@ -31,9 +31,8 @@ use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::edition::Edition;
use rustc_span::source_map::{BytePos, FilePathMapping, SourceMap, Span};
use rustc_span::{sym, FileName, Pos};
-use std::io;
use std::ops::Range;
-use std::thread;
+use std::{io, thread};
use url::Url;
declare_clippy_lint! {
@@ -295,7 +294,9 @@ impl<'tcx> LateLintPass<'tcx> for DocMarkdown {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
let attrs = cx.tcx.hir().attrs(item.hir_id());
- let Some(headers) = check_attrs(cx, &self.valid_idents, attrs) else { return };
+ let Some(headers) = check_attrs(cx, &self.valid_idents, attrs) else {
+ return;
+ };
match item.kind {
hir::ItemKind::Fn(ref sig, _, body_id) => {
if !(is_entrypoint_fn(cx, item.owner_id.to_def_id()) || in_external_macro(cx.tcx.sess, item.span)) {
@@ -339,7 +340,9 @@ impl<'tcx> LateLintPass<'tcx> for DocMarkdown {
fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) {
let attrs = cx.tcx.hir().attrs(item.hir_id());
- let Some(headers) = check_attrs(cx, &self.valid_idents, attrs) else { return };
+ let Some(headers) = check_attrs(cx, &self.valid_idents, attrs) else {
+ return;
+ };
if let hir::TraitItemKind::Fn(ref sig, ..) = item.kind {
if !in_external_macro(cx.tcx.sess, item.span) {
lint_for_missing_headers(cx, item.owner_id, sig, headers, None, None);
@@ -349,7 +352,9 @@ impl<'tcx> LateLintPass<'tcx> for DocMarkdown {
fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'_>) {
let attrs = cx.tcx.hir().attrs(item.hir_id());
- let Some(headers) = check_attrs(cx, &self.valid_idents, attrs) else { return };
+ let Some(headers) = check_attrs(cx, &self.valid_idents, attrs) else {
+ return;
+ };
if self.in_trait_impl || in_external_macro(cx.tcx.sess, item.span) {
return;
}
@@ -711,20 +716,8 @@ fn check_code(cx: &LateContext<'_>, text: &str, edition: Edition, span: Span) {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let fallback_bundle =
rustc_errors::fallback_fluent_bundle(rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), false);
- let emitter = EmitterWriter::new(
- Box::new(io::sink()),
- None,
- None,
- fallback_bundle,
- false,
- false,
- false,
- None,
- false,
- false,
- TerminalUrl::No,
- );
- let handler = Handler::with_emitter(false, None, Box::new(emitter));
+ let emitter = EmitterWriter::new(Box::new(io::sink()), fallback_bundle);
+ let handler = Handler::with_emitter(Box::new(emitter)).disable_warnings();
let sess = ParseSess::with_span_handler(handler, sm);
let mut parser = match maybe_new_parser_from_source_str(&sess, filename, code) {
diff --git a/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs b/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs
index 976ce47e8..14122abbf 100644
--- a/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_note;
-use clippy_utils::get_parent_node;
-use clippy_utils::is_must_use_func_call;
use clippy_utils::ty::{is_copy, is_must_use_ty, is_type_lang_item};
+use clippy_utils::{get_parent_node, is_must_use_func_call};
use rustc_hir::{Arm, Expr, ExprKind, LangItem, Node};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
diff --git a/src/tools/clippy/clippy_lints/src/empty_drop.rs b/src/tools/clippy/clippy_lints/src/empty_drop.rs
index ec063c0f7..209fb66fa 100644
--- a/src/tools/clippy/clippy_lints/src/empty_drop.rs
+++ b/src/tools/clippy/clippy_lints/src/empty_drop.rs
@@ -1,4 +1,5 @@
-use clippy_utils::{diagnostics::span_lint_and_sugg, peel_blocks};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::peel_blocks;
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Body, ExprKind, Impl, ImplItemKind, Item, ItemKind, Node};
diff --git a/src/tools/clippy/clippy_lints/src/empty_enum.rs b/src/tools/clippy/clippy_lints/src/empty_enum.rs
index d94664daa..1701d0611 100644
--- a/src/tools/clippy/clippy_lints/src/empty_enum.rs
+++ b/src/tools/clippy/clippy_lints/src/empty_enum.rs
@@ -49,7 +49,7 @@ impl<'tcx> LateLintPass<'tcx> for EmptyEnum {
}
if let ItemKind::Enum(..) = item.kind {
- let ty = cx.tcx.type_of(item.owner_id).subst_identity();
+ let ty = cx.tcx.type_of(item.owner_id).instantiate_identity();
let adt = ty.ty_adt_def().expect("already checked whether this is an enum");
if adt.variants().is_empty() {
span_lint_and_help(
diff --git a/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs b/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs
index c3a020433..282157181 100644
--- a/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs
+++ b/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs
@@ -1,4 +1,5 @@
-use clippy_utils::{diagnostics::span_lint_and_then, source::snippet_opt};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet_opt;
use rustc_ast::ast::{Item, ItemKind, VariantData};
use rustc_errors::Applicability;
use rustc_lexer::TokenKind;
diff --git a/src/tools/clippy/clippy_lints/src/endian_bytes.rs b/src/tools/clippy/clippy_lints/src/endian_bytes.rs
index f47098783..dda14b4df 100644
--- a/src/tools/clippy/clippy_lints/src/endian_bytes.rs
+++ b/src/tools/clippy/clippy_lints/src/endian_bytes.rs
@@ -1,8 +1,10 @@
use crate::Lint;
-use clippy_utils::{diagnostics::span_lint_and_then, is_lint_allowed};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::is_lint_allowed;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_middle::{lint::in_external_macro, ty::Ty};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::Ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::Symbol;
use std::borrow::Cow;
diff --git a/src/tools/clippy/clippy_lints/src/entry.rs b/src/tools/clippy/clippy_lints/src/entry.rs
index ee5a875ad..6197b5b19 100644
--- a/src/tools/clippy/clippy_lints/src/entry.rs
+++ b/src/tools/clippy/clippy_lints/src/entry.rs
@@ -1,18 +1,14 @@
-use clippy_utils::higher;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::{reindent_multiline, snippet_indent, snippet_with_applicability, snippet_with_context};
use clippy_utils::{
- can_move_expr_to_closure_no_visit,
- diagnostics::span_lint_and_sugg,
- is_expr_final_block_expr, is_expr_used_or_unified, match_def_path, paths, peel_hir_expr_while,
- source::{reindent_multiline, snippet_indent, snippet_with_applicability, snippet_with_context},
- SpanlessEq,
+ can_move_expr_to_closure_no_visit, higher, is_expr_final_block_expr, is_expr_used_or_unified, match_def_path,
+ paths, peel_hir_expr_while, SpanlessEq,
};
use core::fmt::{self, Write};
use rustc_errors::Applicability;
-use rustc_hir::{
- hir_id::HirIdSet,
- intravisit::{walk_expr, Visitor},
- Block, Expr, ExprKind, Guard, HirId, Let, Pat, Stmt, StmtKind, UnOp,
-};
+use rustc_hir::hir_id::HirIdSet;
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{Block, Expr, ExprKind, Guard, HirId, Let, Pat, Stmt, StmtKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::{Span, SyntaxContext, DUMMY_SP};
@@ -69,16 +65,21 @@ impl<'tcx> LateLintPass<'tcx> for HashMapPass {
return;
}
- let Some(higher::If { cond: cond_expr, then: then_expr, r#else: else_expr }) = higher::If::hir(expr) else {
- return
+ let Some(higher::If {
+ cond: cond_expr,
+ then: then_expr,
+ r#else: else_expr,
+ }) = higher::If::hir(expr)
+ else {
+ return;
};
let Some((map_ty, contains_expr)) = try_parse_contains(cx, cond_expr) else {
- return
+ return;
};
let Some(then_search) = find_insert_calls(cx, &contains_expr, then_expr) else {
- return
+ return;
};
let mut app = Applicability::MachineApplicable;
@@ -186,7 +187,7 @@ impl<'tcx> LateLintPass<'tcx> for HashMapPass {
MAP_ENTRY,
expr.span,
&format!("usage of `contains_key` followed by `insert` on a `{}`", map_ty.name()),
- "try this",
+ "try",
sugg,
app,
);
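For context, the hunk above only shortens the suggestion wording for the `MAP_ENTRY` lint ("try this" becomes "try"); the lint itself still rewrites a `contains_key`-then-`insert` sequence into a single `entry` call. A minimal, compilable sketch of the pattern and the suggested form (the map, key, and function names are illustrative, not taken from the diff):

```rust
use std::collections::HashMap;

fn insert_if_absent(map: &mut HashMap<String, u32>, key: String, value: u32) {
    // Flagged: "usage of `contains_key` followed by `insert`" — the key is
    // hashed and looked up twice.
    if !map.contains_key(&key) {
        map.insert(key, value);
    }
}

fn insert_if_absent_suggested(map: &mut HashMap<String, u32>, key: String, value: u32) {
    // Suggested form: a single lookup through the entry API.
    map.entry(key).or_insert(value);
}

fn main() {
    let mut counts = HashMap::new();
    insert_if_absent(&mut counts, "a".to_owned(), 1);
    insert_if_absent_suggested(&mut counts, "b".to_owned(), 2);
    assert_eq!(counts.len(), 2);
}
```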
diff --git a/src/tools/clippy/clippy_lints/src/enum_clike.rs b/src/tools/clippy/clippy_lints/src/enum_clike.rs
index d85650712..96c5c7fc5 100644
--- a/src/tools/clippy/clippy_lints/src/enum_clike.rs
+++ b/src/tools/clippy/clippy_lints/src/enum_clike.rs
@@ -45,7 +45,7 @@ impl<'tcx> LateLintPass<'tcx> for UnportableVariant {
for var in def.variants {
if let Some(anon_const) = &var.disr_expr {
let def_id = cx.tcx.hir().body_owner_def_id(anon_const.body);
- let mut ty = cx.tcx.type_of(def_id.to_def_id()).subst_identity();
+ let mut ty = cx.tcx.type_of(def_id.to_def_id()).instantiate_identity();
let constant = cx
.tcx
.const_eval_poly(def_id.to_def_id())
diff --git a/src/tools/clippy/clippy_lints/src/error_impl_error.rs b/src/tools/clippy/clippy_lints/src/error_impl_error.rs
new file mode 100644
index 000000000..379af9b22
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/error_impl_error.rs
@@ -0,0 +1,87 @@
+use clippy_utils::diagnostics::{span_lint, span_lint_hir_and_then};
+use clippy_utils::path_res;
+use clippy_utils::ty::implements_trait;
+use rustc_hir::def_id::{DefId, LocalDefId};
+use rustc_hir::{Item, ItemKind};
+use rustc_hir_analysis::hir_ty_to_ty;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::ty::Visibility;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::sym;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for types named `Error` that implement `Error`.
+ ///
+ /// ### Why is this bad?
+ /// It can become confusing when a codebase has 20 types all named `Error`, requiring either
+ /// aliasing them in the `use` statement or qualifying them like `my_module::Error`. This
+ /// hinders comprehension, as it requires you to memorize every variation of importing `Error`
+ /// used across a codebase.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// #[derive(Debug)]
+ /// pub enum Error { ... }
+ ///
+ /// impl std::fmt::Display for Error { ... }
+ ///
+ /// impl std::error::Error for Error { ... }
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub ERROR_IMPL_ERROR,
+ restriction,
+ "exported types named `Error` that implement `Error`"
+}
+declare_lint_pass!(ErrorImplError => [ERROR_IMPL_ERROR]);
+
+impl<'tcx> LateLintPass<'tcx> for ErrorImplError {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
+ let Some(error_def_id) = cx.tcx.get_diagnostic_item(sym::Error) else {
+ return;
+ };
+
+ match item.kind {
+ ItemKind::TyAlias(ty, _) if implements_trait(cx, hir_ty_to_ty(cx.tcx, ty), error_def_id, &[])
+ && item.ident.name == sym::Error
+ && is_visible_outside_module(cx, item.owner_id.def_id) =>
+ {
+ span_lint(
+ cx,
+ ERROR_IMPL_ERROR,
+ item.ident.span,
+ "exported type alias named `Error` that implements `Error`",
+ );
+ },
+ ItemKind::Impl(imp) if let Some(trait_def_id) = imp.of_trait.and_then(|t| t.trait_def_id())
+ && error_def_id == trait_def_id
+ && let Some(def_id) = path_res(cx, imp.self_ty).opt_def_id().and_then(DefId::as_local)
+ && let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id)
+ && let Some(ident) = cx.tcx.opt_item_ident(def_id.to_def_id())
+ && ident.name == sym::Error
+ && is_visible_outside_module(cx, def_id) =>
+ {
+ span_lint_hir_and_then(
+ cx,
+ ERROR_IMPL_ERROR,
+ hir_id,
+ ident.span,
+ "exported type named `Error` that implements `Error`",
+ |diag| {
+ diag.span_note(item.span, "`Error` was implemented here");
+ }
+ );
+ }
+ _ => {},
+ }
+ }
+}
+
+/// Do not lint private `Error`s, i.e., ones without any `pub` (minus `pub(self)` of course) and
+/// which aren't reexported
+fn is_visible_outside_module(cx: &LateContext<'_>, def_id: LocalDefId) -> bool {
+ !matches!(
+ cx.tcx.visibility(def_id),
+ Visibility::Restricted(mod_def_id) if cx.tcx.parent_module_from_def_id(def_id).to_def_id() == mod_def_id
+ )
+}
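A small, compilable illustration of what the new `ERROR_IMPL_ERROR` restriction lint objects to, and one way to address it. The renamed type is only an example; the lint merely asks that an exported type implementing `std::error::Error` not itself be named `Error`:

```rust
use std::fmt;

// Flagged: an exported type named `Error` that implements `std::error::Error`.
#[derive(Debug)]
pub enum Error {
    NotFound,
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "not found")
    }
}

impl std::error::Error for Error {}

// Not flagged: the exported type carries a more descriptive name.
#[derive(Debug)]
pub enum LookupError {
    NotFound,
}

impl fmt::Display for LookupError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "not found")
    }
}

impl std::error::Error for LookupError {}

fn main() {
    let _ = Error::NotFound;
    let _ = LookupError::NotFound;
}
```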
diff --git a/src/tools/clippy/clippy_lints/src/escape.rs b/src/tools/clippy/clippy_lints/src/escape.rs
index a51a8ee09..dbe3453e7 100644
--- a/src/tools/clippy/clippy_lints/src/escape.rs
+++ b/src/tools/clippy/clippy_lints/src/escape.rs
@@ -1,6 +1,5 @@
use clippy_utils::diagnostics::span_lint_hir;
-use rustc_hir::intravisit;
-use rustc_hir::{self, AssocItemKind, Body, FnDecl, HirId, HirIdSet, Impl, ItemKind, Node, Pat, PatKind};
+use rustc_hir::{self, intravisit, AssocItemKind, Body, FnDecl, HirId, HirIdSet, Impl, ItemKind, Node, Pat, PatKind};
use rustc_hir_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::{LateContext, LateLintPass};
diff --git a/src/tools/clippy/clippy_lints/src/eta_reduction.rs b/src/tools/clippy/clippy_lints/src/eta_reduction.rs
index 58e62d1f3..38066503c 100644
--- a/src/tools/clippy/clippy_lints/src/eta_reduction.rs
+++ b/src/tools/clippy/clippy_lints/src/eta_reduction.rs
@@ -1,19 +1,22 @@
use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
use clippy_utils::higher::VecArgs;
use clippy_utils::source::snippet_opt;
-use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
-use clippy_utils::usage::local_used_after_expr;
+use clippy_utils::ty::type_diagnostic_name;
+use clippy_utils::usage::{local_used_after_expr, local_used_in};
use clippy_utils::{higher, is_adjusted, path_to_local, path_to_local_id};
-use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def_id::DefId;
-use rustc_hir::{Closure, Expr, ExprKind, Param, PatKind, Unsafety};
+use rustc_hir::{BindingAnnotation, Expr, ExprKind, FnRetTy, Param, PatKind, QPath, TyKind, Unsafety};
+use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow};
-use rustc_middle::ty::binding::BindingMode;
-use rustc_middle::ty::{self, EarlyBinder, SubstsRef, Ty, TypeVisitableExt};
+use rustc_middle::ty::{
+ self, Binder, ClosureArgs, ClosureKind, EarlyBinder, FnSig, GenericArg, GenericArgKind, GenericArgsRef,
+ ImplPolarity, List, Region, RegionKind, Ty, TypeVisitableExt, TypeckResults,
+};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::symbol::sym;
+use rustc_target::spec::abi::Abi;
+use rustc_trait_selection::traits::error_reporting::InferCtxtExt as _;
declare_clippy_lint! {
/// ### What it does
@@ -72,14 +75,18 @@ declare_clippy_lint! {
declare_lint_pass!(EtaReduction => [REDUNDANT_CLOSURE, REDUNDANT_CLOSURE_FOR_METHOD_CALLS]);
impl<'tcx> LateLintPass<'tcx> for EtaReduction {
+ #[allow(clippy::too_many_lines)]
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- if expr.span.from_expansion() {
+ let body = if let ExprKind::Closure(c) = expr.kind
+ && c.fn_decl.inputs.iter().all(|ty| matches!(ty.kind, TyKind::Infer))
+ && matches!(c.fn_decl.output, FnRetTy::DefaultReturn(_))
+ && !expr.span.from_expansion()
+ {
+ cx.tcx.hir().body(c.body)
+ } else {
return;
- }
- let body = match expr.kind {
- ExprKind::Closure(&Closure { body, .. }) => cx.tcx.hir().body(body),
- _ => return,
};
+
if body.value.span.from_expansion() {
if body.params.is_empty() {
if let Some(VecArgs::Vec(&[])) = higher::VecArgs::hir(cx, body.value) {
@@ -99,149 +106,217 @@ impl<'tcx> LateLintPass<'tcx> for EtaReduction {
return;
}
- let closure_ty = cx.typeck_results().expr_ty(expr);
+ let typeck = cx.typeck_results();
+ let closure = if let ty::Closure(_, closure_subs) = typeck.expr_ty(expr).kind() {
+ closure_subs.as_closure()
+ } else {
+ return;
+ };
- if_chain!(
- if !is_adjusted(cx, body.value);
- if let ExprKind::Call(callee, args) = body.value.kind;
- if let ExprKind::Path(_) = callee.kind;
- if check_inputs(cx, body.params, None, args);
- let callee_ty = cx.typeck_results().expr_ty_adjusted(callee);
- let call_ty = cx.typeck_results().type_dependent_def_id(body.value.hir_id)
- .map_or(callee_ty, |id| cx.tcx.type_of(id).subst_identity());
- if check_sig(cx, closure_ty, call_ty);
- let substs = cx.typeck_results().node_substs(callee.hir_id);
- // This fixes some false positives that I don't entirely understand
- if substs.is_empty() || !cx.typeck_results().expr_ty(expr).has_late_bound_regions();
- // A type param function ref like `T::f` is not 'static, however
- // it is if cast like `T::f as fn()`. This seems like a rustc bug.
- if !substs.types().any(|t| matches!(t.kind(), ty::Param(_)));
- let callee_ty_unadjusted = cx.typeck_results().expr_ty(callee).peel_refs();
- if !is_type_diagnostic_item(cx, callee_ty_unadjusted, sym::Arc);
- if !is_type_diagnostic_item(cx, callee_ty_unadjusted, sym::Rc);
- if let ty::Closure(_, substs) = *closure_ty.kind();
- // Don't lint if this is an inclusive range expression.
- // They desugar to a call to `RangeInclusiveNew` which would have odd suggestions. (#10684)
- if !matches!(higher::Range::hir(body.value), Some(higher::Range {
- start: Some(_),
- end: Some(_),
- limits: rustc_ast::RangeLimits::Closed
- }));
- then {
- span_lint_and_then(cx, REDUNDANT_CLOSURE, expr.span, "redundant closure", |diag| {
- if let Some(mut snippet) = snippet_opt(cx, callee.span) {
- if let Some(fn_mut_id) = cx.tcx.lang_items().fn_mut_trait()
- && let args = cx.tcx.erase_late_bound_regions(substs.as_closure().sig()).inputs()
- && implements_trait(
- cx,
- callee_ty.peel_refs(),
- fn_mut_id,
- &args.iter().copied().map(Into::into).collect::<Vec<_>>(),
- )
- && path_to_local(callee).map_or(false, |l| local_used_after_expr(cx, l, expr))
- {
- // Mutable closure is used after current expr; we cannot consume it.
- snippet = format!("&mut {snippet}");
- }
+ if is_adjusted(cx, body.value) {
+ return;
+ }
- diag.span_suggestion(
- expr.span,
- "replace the closure with the function itself",
- snippet,
- Applicability::MachineApplicable,
- );
- }
- });
- }
- );
+ match body.value.kind {
+ ExprKind::Call(callee, args)
+ if matches!(callee.kind, ExprKind::Path(QPath::Resolved(..) | QPath::TypeRelative(..))) =>
+ {
+ let callee_ty = typeck.expr_ty(callee).peel_refs();
+ if matches!(
+ type_diagnostic_name(cx, callee_ty),
+ Some(sym::Arc | sym::Rc)
+ ) || !check_inputs(typeck, body.params, None, args) {
+ return;
+ }
+ let callee_ty_adjusted = typeck.expr_adjustments(callee).last().map_or(
+ callee_ty,
+ |a| a.target.peel_refs(),
+ );
- if_chain!(
- if !is_adjusted(cx, body.value);
- if let ExprKind::MethodCall(path, receiver, args, _) = body.value.kind;
- if check_inputs(cx, body.params, Some(receiver), args);
- let method_def_id = cx.typeck_results().type_dependent_def_id(body.value.hir_id).unwrap();
- let substs = cx.typeck_results().node_substs(body.value.hir_id);
- let call_ty = cx.tcx.type_of(method_def_id).subst(cx.tcx, substs);
- if check_sig(cx, closure_ty, call_ty);
- then {
- span_lint_and_then(cx, REDUNDANT_CLOSURE_FOR_METHOD_CALLS, expr.span, "redundant closure", |diag| {
- let name = get_ufcs_type_name(cx, method_def_id, substs);
- diag.span_suggestion(
+ let sig = match callee_ty_adjusted.kind() {
+ ty::FnDef(def, _) => cx.tcx.fn_sig(def).skip_binder().skip_binder(),
+ ty::FnPtr(sig) => sig.skip_binder(),
+ ty::Closure(_, subs) => cx
+ .tcx
+ .signature_unclosure(subs.as_closure().sig(), Unsafety::Normal)
+ .skip_binder(),
+ _ => {
+ if typeck.type_dependent_def_id(body.value.hir_id).is_some()
+ && let subs = typeck.node_args(body.value.hir_id)
+ && let output = typeck.expr_ty(body.value)
+ && let ty::Tuple(tys) = *subs.type_at(1).kind()
+ {
+ cx.tcx.mk_fn_sig(tys, output, false, Unsafety::Normal, Abi::Rust)
+ } else {
+ return;
+ }
+ },
+ };
+ if check_sig(cx, closure, sig)
+ && let generic_args = typeck.node_args(callee.hir_id)
+ // Given some trait fn `fn f() -> ()` and some type `T: Trait`, `T::f` is not
+ // `'static` unless `T: 'static`. The cast `T::f as fn()` will, however, result
+ // in a type which is `'static`.
+ // For now ignore all callee types which reference a type parameter.
+ && !generic_args.types().any(|t| matches!(t.kind(), ty::Param(_)))
+ {
+ span_lint_and_then(
+ cx,
+ REDUNDANT_CLOSURE,
expr.span,
- "replace the closure with the method itself",
- format!("{name}::{}", path.ident.name),
- Applicability::MachineApplicable,
+ "redundant closure",
+ |diag| {
+ if let Some(mut snippet) = snippet_opt(cx, callee.span) {
+ if let Ok((ClosureKind::FnMut, _))
+ = cx.tcx.infer_ctxt().build().type_implements_fn_trait(
+ cx.param_env,
+ Binder::bind_with_vars(callee_ty_adjusted, List::empty()),
+ ImplPolarity::Positive,
+ ) && path_to_local(callee)
+ .map_or(
+ false,
+ |l| local_used_in(cx, l, args) || local_used_after_expr(cx, l, expr),
+ )
+ {
+ // Mutable closure is used after current expr; we cannot consume it.
+ snippet = format!("&mut {snippet}");
+ }
+ diag.span_suggestion(
+ expr.span,
+ "replace the closure with the function itself",
+ snippet,
+ Applicability::MachineApplicable,
+ );
+ }
+ }
);
- })
- }
- );
+ }
+ },
+ ExprKind::MethodCall(path, self_, args, _) if check_inputs(typeck, body.params, Some(self_), args) => {
+ if let Some(method_def_id) = typeck.type_dependent_def_id(body.value.hir_id)
+ && check_sig(cx, closure, cx.tcx.fn_sig(method_def_id).skip_binder().skip_binder())
+ {
+ span_lint_and_then(
+ cx,
+ REDUNDANT_CLOSURE_FOR_METHOD_CALLS,
+ expr.span,
+ "redundant closure",
+ |diag| {
+ let args = typeck.node_args(body.value.hir_id);
+ let name = get_ufcs_type_name(cx, method_def_id, args);
+ diag.span_suggestion(
+ expr.span,
+ "replace the closure with the method itself",
+ format!("{}::{}", name, path.ident.name),
+ Applicability::MachineApplicable,
+ );
+ },
+ );
+ }
+ },
+ _ => (),
+ }
}
}
fn check_inputs(
- cx: &LateContext<'_>,
+ typeck: &TypeckResults<'_>,
params: &[Param<'_>],
- receiver: Option<&Expr<'_>>,
- call_args: &[Expr<'_>],
+ self_arg: Option<&Expr<'_>>,
+ args: &[Expr<'_>],
) -> bool {
- if receiver.map_or(params.len() != call_args.len(), |_| params.len() != call_args.len() + 1) {
- return false;
+ params.len() == self_arg.map_or(0, |_| 1) + args.len()
+ && params.iter().zip(self_arg.into_iter().chain(args)).all(|(p, arg)| {
+ matches!(
+ p.pat.kind, PatKind::Binding(BindingAnnotation::NONE, id, _, None)
+ if path_to_local_id(arg, id)
+ )
+ // Only allow adjustments which change regions (i.e. re-borrowing).
+ && typeck
+ .expr_adjustments(arg)
+ .last()
+ .map_or(true, |a| a.target == typeck.expr_ty(arg))
+ })
+}
+
+fn check_sig<'tcx>(cx: &LateContext<'tcx>, closure: ClosureArgs<'tcx>, call_sig: FnSig<'_>) -> bool {
+ call_sig.unsafety == Unsafety::Normal
+ && !has_late_bound_to_non_late_bound_regions(
+ cx.tcx
+ .signature_unclosure(closure.sig(), Unsafety::Normal)
+ .skip_binder(),
+ call_sig,
+ )
+}
+
+/// This walks through both signatures and checks for any time a late-bound region is expected by an
+/// `impl Fn` type, but the target signature does not have a late-bound region in the same position.
+///
+/// This is needed because rustc is unable to late bind early-bound regions in a function signature.
+fn has_late_bound_to_non_late_bound_regions(from_sig: FnSig<'_>, to_sig: FnSig<'_>) -> bool {
+ fn check_region(from_region: Region<'_>, to_region: Region<'_>) -> bool {
+ matches!(from_region.kind(), RegionKind::ReLateBound(..))
+ && !matches!(to_region.kind(), RegionKind::ReLateBound(..))
}
- let binding_modes = cx.typeck_results().pat_binding_modes();
- let check_inputs = |param: &Param<'_>, arg| {
- match param.pat.kind {
- PatKind::Binding(_, id, ..) if path_to_local_id(arg, id) => {},
- _ => return false,
- }
- // checks that parameters are not bound as `ref` or `ref mut`
- if let Some(BindingMode::BindByReference(_)) = binding_modes.get(param.pat.hir_id) {
- return false;
- }
- match *cx.typeck_results().expr_adjustments(arg) {
- [] => true,
- [
- Adjustment {
- kind: Adjust::Deref(None),
- ..
+ fn check_subs(from_subs: &[GenericArg<'_>], to_subs: &[GenericArg<'_>]) -> bool {
+ if from_subs.len() != to_subs.len() {
+ return true;
+ }
+ for (from_arg, to_arg) in to_subs.iter().zip(from_subs) {
+ match (from_arg.unpack(), to_arg.unpack()) {
+ (GenericArgKind::Lifetime(from_region), GenericArgKind::Lifetime(to_region)) => {
+ if check_region(from_region, to_region) {
+ return true;
+ }
},
- Adjustment {
- kind: Adjust::Borrow(AutoBorrow::Ref(_, mu2)),
- ..
+ (GenericArgKind::Type(from_ty), GenericArgKind::Type(to_ty)) => {
+ if check_ty(from_ty, to_ty) {
+ return true;
+ }
},
- ] => {
- // re-borrow with the same mutability is allowed
- let ty = cx.typeck_results().expr_ty(arg);
- matches!(*ty.kind(), ty::Ref(.., mu1) if mu1 == mu2.into())
- },
- _ => false,
+ (GenericArgKind::Const(_), GenericArgKind::Const(_)) => (),
+ _ => return true,
+ }
}
- };
- std::iter::zip(params, receiver.into_iter().chain(call_args.iter())).all(|(param, arg)| check_inputs(param, arg))
-}
-
-fn check_sig<'tcx>(cx: &LateContext<'tcx>, closure_ty: Ty<'tcx>, call_ty: Ty<'tcx>) -> bool {
- let call_sig = call_ty.fn_sig(cx.tcx);
- if call_sig.unsafety() == Unsafety::Unsafe {
- return false;
+ false
}
- if !closure_ty.has_late_bound_regions() {
- return true;
+
+ fn check_ty(from_ty: Ty<'_>, to_ty: Ty<'_>) -> bool {
+ match (from_ty.kind(), to_ty.kind()) {
+ (&ty::Adt(_, from_subs), &ty::Adt(_, to_subs)) => check_subs(from_subs, to_subs),
+ (&ty::Array(from_ty, _), &ty::Array(to_ty, _)) | (&ty::Slice(from_ty), &ty::Slice(to_ty)) => {
+ check_ty(from_ty, to_ty)
+ },
+ (&ty::Ref(from_region, from_ty, _), &ty::Ref(to_region, to_ty, _)) => {
+ check_region(from_region, to_region) || check_ty(from_ty, to_ty)
+ },
+ (&ty::Tuple(from_tys), &ty::Tuple(to_tys)) => {
+ from_tys.len() != to_tys.len()
+ || from_tys
+ .iter()
+ .zip(to_tys)
+ .any(|(from_ty, to_ty)| check_ty(from_ty, to_ty))
+ },
+ _ => from_ty.has_late_bound_regions(),
+ }
}
- let ty::Closure(_, substs) = closure_ty.kind() else {
- return false;
- };
- let closure_sig = cx.tcx.signature_unclosure(substs.as_closure().sig(), Unsafety::Normal);
- cx.tcx.erase_late_bound_regions(closure_sig) == cx.tcx.erase_late_bound_regions(call_sig)
+
+ assert!(from_sig.inputs_and_output.len() == to_sig.inputs_and_output.len());
+ from_sig
+ .inputs_and_output
+ .iter()
+ .zip(to_sig.inputs_and_output)
+ .any(|(from_ty, to_ty)| check_ty(from_ty, to_ty))
}
-fn get_ufcs_type_name<'tcx>(cx: &LateContext<'tcx>, method_def_id: DefId, substs: SubstsRef<'tcx>) -> String {
+fn get_ufcs_type_name<'tcx>(cx: &LateContext<'tcx>, method_def_id: DefId, args: GenericArgsRef<'tcx>) -> String {
let assoc_item = cx.tcx.associated_item(method_def_id);
let def_id = assoc_item.container_id(cx.tcx);
match assoc_item.container {
ty::TraitContainer => cx.tcx.def_path_str(def_id),
ty::ImplContainer => {
- let ty = cx.tcx.type_of(def_id).skip_binder();
+ let ty = cx.tcx.type_of(def_id).instantiate_identity();
match ty.kind() {
ty::Adt(adt, _) => cx.tcx.def_path_str(adt.did()),
ty::Array(..)
@@ -251,7 +326,7 @@ fn get_ufcs_type_name<'tcx>(cx: &LateContext<'tcx>, method_def_id: DefId, substs
| ty::Ref(..)
| ty::Slice(_)
| ty::Tuple(_) => {
- format!("<{}>", EarlyBinder::bind(ty).subst(cx.tcx, substs))
+ format!("<{}>", EarlyBinder::bind(ty).instantiate(cx.tcx, args))
},
_ => ty.to_string(),
}
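The rewrite above changes how `REDUNDANT_CLOSURE` and `REDUNDANT_CLOSURE_FOR_METHOD_CALLS` derive the callee signature (going through `FnSig` and `ClosureArgs` instead of `if_chain!` over types), but the user-facing behaviour remains the familiar one: a closure that only forwards its arguments can be replaced by the function or method it calls. A compilable sketch with made-up names:

```rust
fn double(x: u32) -> u32 {
    x * 2
}

fn main() {
    let xs = vec![1u32, 2, 3];

    // Flagged by `redundant_closure`: the closure only forwards `x` to `double`.
    let doubled: Vec<u32> = xs.iter().copied().map(|x| double(x)).collect();
    // Suggested: "replace the closure with the function itself".
    let doubled_suggested: Vec<u32> = xs.iter().copied().map(double).collect();

    let words = vec!["redundant", "closure"];
    // Flagged by `redundant_closure_for_method_calls`.
    let lens: Vec<usize> = words.iter().copied().map(|w| w.len()).collect();
    // Suggested: "replace the closure with the method itself".
    let lens_suggested: Vec<usize> = words.iter().copied().map(str::len).collect();

    assert_eq!(doubled, doubled_suggested);
    assert_eq!(lens, lens_suggested);
}
```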
diff --git a/src/tools/clippy/clippy_lints/src/excessive_nesting.rs b/src/tools/clippy/clippy_lints/src/excessive_nesting.rs
index d04d833e6..8911f1872 100644
--- a/src/tools/clippy/clippy_lints/src/excessive_nesting.rs
+++ b/src/tools/clippy/clippy_lints/src/excessive_nesting.rs
@@ -1,9 +1,8 @@
-use clippy_utils::{diagnostics::span_lint_and_help, source::snippet};
-use rustc_ast::{
- node_id::NodeSet,
- visit::{walk_block, walk_item, Visitor},
- Block, Crate, Inline, Item, ItemKind, ModKind, NodeId,
-};
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::source::snippet;
+use rustc_ast::node_id::NodeSet;
+use rustc_ast::visit::{walk_block, walk_item, Visitor};
+use rustc_ast::{Block, Crate, Inline, Item, ItemKind, ModKind, NodeId};
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_tool_lint, impl_lint_pass};
diff --git a/src/tools/clippy/clippy_lints/src/explicit_write.rs b/src/tools/clippy/clippy_lints/src/explicit_write.rs
index 315df6c71..4b9ca8c91 100644
--- a/src/tools/clippy/clippy_lints/src/explicit_write.rs
+++ b/src/tools/clippy/clippy_lints/src/explicit_write.rs
@@ -100,7 +100,7 @@ impl<'tcx> LateLintPass<'tcx> for ExplicitWrite {
EXPLICIT_WRITE,
expr.span,
&format!("use of `{used}.unwrap()`"),
- "try this",
+ "try",
format!("{prefix}{sugg_mac}!({inputs_snippet})"),
applicability,
);
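The `EXPLICIT_WRITE` hunk above also only touches the suggestion wording. As a reminder of the pattern it targets (assuming the usual behaviour of the lint: explicit `write!`/`writeln!` calls to `stdout()`/`stderr()` followed by `unwrap()` are rewritten to the print macros), a minimal sketch:

```rust
use std::io::Write;

fn main() {
    // Flagged by `explicit_write`: an explicit write to stderr plus `unwrap()`.
    writeln!(std::io::stderr(), "something went wrong").unwrap();
    // Suggested replacement.
    eprintln!("something went wrong");
}
```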
diff --git a/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs b/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs
index 126bed678..c18006a71 100644
--- a/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs
+++ b/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs
@@ -1,6 +1,5 @@
use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_then};
-use clippy_utils::is_from_proc_macro;
-use clippy_utils::trait_ref_of_method;
+use clippy_utils::{is_from_proc_macro, trait_ref_of_method};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_impl_item, walk_item, walk_param_bound, walk_ty, Visitor};
@@ -12,10 +11,8 @@ use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::nested_filter;
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::{
- def_id::{DefId, LocalDefId},
- Span,
-};
+use rustc_span::def_id::{DefId, LocalDefId};
+use rustc_span::Span;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs b/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs
index 5e0fcd743..29e5315f8 100644
--- a/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs
+++ b/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs
@@ -1,10 +1,8 @@
-use clippy_utils::consts::{
- constant, constant_simple, Constant,
- Constant::{Int, F32, F64},
-};
+use clippy_utils::consts::Constant::{Int, F32, F64};
+use clippy_utils::consts::{constant, constant_simple, Constant};
+use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::{
- diagnostics::span_lint_and_sugg, eq_expr_value, get_parent_expr, higher, in_constant, is_no_std_crate,
- numeric_literal, peel_blocks, sugg,
+ eq_expr_value, get_parent_expr, higher, in_constant, is_no_std_crate, numeric_literal, peel_blocks, sugg,
};
use if_chain::if_chain;
use rustc_errors::Applicability;
diff --git a/src/tools/clippy/clippy_lints/src/fn_null_check.rs b/src/tools/clippy/clippy_lints/src/fn_null_check.rs
deleted file mode 100644
index 521045a9f..000000000
--- a/src/tools/clippy/clippy_lints/src/fn_null_check.rs
+++ /dev/null
@@ -1,102 +0,0 @@
-use clippy_utils::consts::{constant, Constant};
-use clippy_utils::diagnostics::span_lint_and_help;
-use clippy_utils::{is_integer_literal, is_path_diagnostic_item};
-use rustc_hir::{BinOpKind, Expr, ExprKind, TyKind};
-use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::sym;
-
-declare_clippy_lint! {
- /// ### What it does
- /// Checks for comparing a function pointer to null.
- ///
- /// ### Why is this bad?
- /// Function pointers are assumed to not be null.
- ///
- /// ### Example
- /// ```rust,ignore
- /// let fn_ptr: fn() = /* somehow obtained nullable function pointer */
- ///
- /// if (fn_ptr as *const ()).is_null() { ... }
- /// ```
- /// Use instead:
- /// ```rust,ignore
- /// let fn_ptr: Option<fn()> = /* somehow obtained nullable function pointer */
- ///
- /// if fn_ptr.is_none() { ... }
- /// ```
- #[clippy::version = "1.68.0"]
- pub FN_NULL_CHECK,
- correctness,
- "`fn()` type assumed to be nullable"
-}
-declare_lint_pass!(FnNullCheck => [FN_NULL_CHECK]);
-
-fn lint_expr(cx: &LateContext<'_>, expr: &Expr<'_>) {
- span_lint_and_help(
- cx,
- FN_NULL_CHECK,
- expr.span,
- "function pointer assumed to be nullable, even though it isn't",
- None,
- "try wrapping your function pointer type in `Option<T>` instead, and using `is_none` to check for null pointer value",
- );
-}
-
-fn is_fn_ptr_cast(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
- if let ExprKind::Cast(cast_expr, cast_ty) = expr.kind
- && let TyKind::Ptr(_) = cast_ty.kind
- {
- cx.typeck_results().expr_ty_adjusted(cast_expr).is_fn()
- } else {
- false
- }
-}
-
-impl<'tcx> LateLintPass<'tcx> for FnNullCheck {
- fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- match expr.kind {
- // Catching:
- // (fn_ptr as *<const/mut> <ty>).is_null()
- ExprKind::MethodCall(method_name, receiver, _, _)
- if method_name.ident.as_str() == "is_null" && is_fn_ptr_cast(cx, receiver) =>
- {
- lint_expr(cx, expr);
- },
-
- ExprKind::Binary(op, left, right) if matches!(op.node, BinOpKind::Eq) => {
- let to_check: &Expr<'_>;
- if is_fn_ptr_cast(cx, left) {
- to_check = right;
- } else if is_fn_ptr_cast(cx, right) {
- to_check = left;
- } else {
- return;
- }
-
- match to_check.kind {
- // Catching:
- // (fn_ptr as *<const/mut> <ty>) == (0 as <ty>)
- ExprKind::Cast(cast_expr, _) if is_integer_literal(cast_expr, 0) => {
- lint_expr(cx, expr);
- },
-
- // Catching:
- // (fn_ptr as *<const/mut> <ty>) == std::ptr::null()
- ExprKind::Call(func, []) if is_path_diagnostic_item(cx, func, sym::ptr_null) => {
- lint_expr(cx, expr);
- },
-
- // Catching:
- // (fn_ptr as *<const/mut> <ty>) == <const that evaluates to null_ptr>
- _ if matches!(constant(cx, cx.typeck_results(), to_check), Some(Constant::RawPtr(0))) => {
- lint_expr(cx, expr);
- },
-
- _ => {},
- }
- },
- _ => {},
- }
- }
-}
diff --git a/src/tools/clippy/clippy_lints/src/format.rs b/src/tools/clippy/clippy_lints/src/format.rs
index d34d6e927..f4f8bdc2c 100644
--- a/src/tools/clippy/clippy_lints/src/format.rs
+++ b/src/tools/clippy/clippy_lints/src/format.rs
@@ -43,7 +43,9 @@ declare_lint_pass!(UselessFormat => [USELESS_FORMAT]);
impl<'tcx> LateLintPass<'tcx> for UselessFormat {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return };
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else {
+ return;
+ };
if !cx.tcx.is_diagnostic_item(sym::format_macro, macro_call.def_id) {
return;
}
diff --git a/src/tools/clippy/clippy_lints/src/format_args.rs b/src/tools/clippy/clippy_lints/src/format_args.rs
index 08e45ed7d..01c714c41 100644
--- a/src/tools/clippy/clippy_lints/src/format_args.rs
+++ b/src/tools/clippy/clippy_lints/src/format_args.rs
@@ -14,10 +14,8 @@ use rustc_ast::{
FormatArgPosition, FormatArgPositionKind, FormatArgsPiece, FormatArgumentKind, FormatCount, FormatOptions,
FormatPlaceholder, FormatTrait,
};
-use rustc_errors::{
- Applicability,
- SuggestionStyle::{CompletelyHidden, ShowCode},
-};
+use rustc_errors::Applicability;
+use rustc_errors::SuggestionStyle::{CompletelyHidden, ShowCode};
use rustc_hir::{Expr, ExprKind, LangItem};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::ty::adjustment::{Adjust, Adjustment};
@@ -188,7 +186,9 @@ impl FormatArgs {
impl<'tcx> LateLintPass<'tcx> for FormatArgs {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
- let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return };
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else {
+ return;
+ };
if !is_format_macro(cx, macro_call.def_id) {
return;
}
diff --git a/src/tools/clippy/clippy_lints/src/format_impl.rs b/src/tools/clippy/clippy_lints/src/format_impl.rs
index 3ddee1842..76369bccf 100644
--- a/src/tools/clippy/clippy_lints/src/format_impl.rs
+++ b/src/tools/clippy/clippy_lints/src/format_impl.rs
@@ -7,8 +7,8 @@ use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, Impl, ImplItem, ImplItemKind, QPath};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::Span;
-use rustc_span::{sym, symbol::kw, Symbol};
+use rustc_span::symbol::kw;
+use rustc_span::{sym, Span, Symbol};
declare_clippy_lint! {
/// ### What it does
@@ -127,7 +127,9 @@ impl<'tcx> LateLintPass<'tcx> for FormatImpl {
}
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- let Some(format_trait_impl) = self.format_trait_impl else { return };
+ let Some(format_trait_impl) = self.format_trait_impl else {
+ return;
+ };
if format_trait_impl.name == sym::Display {
check_to_string_in_display(cx, expr);
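The `check_to_string_in_display` call seen above guards against a classic footgun: calling `self.to_string()` inside a `Display` impl re-enters the same impl, because the blanket `ToString` implementation is written in terms of `Display`. A minimal sketch (the struct is made up; the offending line is left commented out so the example runs):

```rust
use std::fmt;

struct Label(String);

impl fmt::Display for Label {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Flagged: `to_string` goes through `Display`, so this call would
        // recurse back into this `fmt` method until the stack overflows.
        // write!(f, "{}", self.to_string())

        // Fine: format the inner field directly instead.
        write!(f, "{}", self.0)
    }
}

fn main() {
    println!("{}", Label("ok".to_owned()));
}
```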
diff --git a/src/tools/clippy/clippy_lints/src/four_forward_slashes.rs b/src/tools/clippy/clippy_lints/src/four_forward_slashes.rs
new file mode 100644
index 000000000..419c77343
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/four_forward_slashes.rs
@@ -0,0 +1,99 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use rustc_errors::Applicability;
+use rustc_hir::Item;
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::Span;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for outer doc comments written with 4 forward slashes (`////`).
+ ///
+ /// ### Why is this bad?
+ /// This is (probably) a typo, and results in the comment being a regular comment
+ /// rather than a doc comment.
+ ///
+ /// ### Example
+ /// ```rust
+ /// //// My amazing data structure
+ /// pub struct Foo {
+ /// // ...
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// /// My amazing data structure
+ /// pub struct Foo {
+ /// // ...
+ /// }
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub FOUR_FORWARD_SLASHES,
+ suspicious,
+ "comments with 4 forward slashes (`////`) likely intended to be doc comments (`///`)"
+}
+declare_lint_pass!(FourForwardSlashes => [FOUR_FORWARD_SLASHES]);
+
+impl<'tcx> LateLintPass<'tcx> for FourForwardSlashes {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
+ if item.span.from_expansion() {
+ return;
+ }
+ let sm = cx.sess().source_map();
+ let mut span = cx
+ .tcx
+ .hir()
+ .attrs(item.hir_id())
+ .iter()
+ .fold(item.span.shrink_to_lo(), |span, attr| span.to(attr.span));
+ let (Some(file), _, _, end_line, _) = sm.span_to_location_info(span) else {
+ return;
+ };
+ let mut bad_comments = vec![];
+ for line in (0..end_line.saturating_sub(1)).rev() {
+ let Some(contents) = file.get_line(line).map(|c| c.trim().to_owned()) else {
+ return;
+ };
+ // Keep searching until we find the next item
+ if !contents.is_empty() && !contents.starts_with("//") && !contents.starts_with("#[") {
+ break;
+ }
+
+ if contents.starts_with("////") && !matches!(contents.chars().nth(4), Some('/' | '!')) {
+ let bounds = file.line_bounds(line);
+ let line_span = Span::with_root_ctxt(bounds.start, bounds.end);
+ span = line_span.to(span);
+ bad_comments.push((line_span, contents));
+ }
+ }
+
+ if !bad_comments.is_empty() {
+ span_lint_and_then(
+ cx,
+ FOUR_FORWARD_SLASHES,
+ span,
+ "this item has comments with 4 forward slashes (`////`). These look like doc comments, but they aren't",
+ |diag| {
+ let msg = if bad_comments.len() == 1 {
+ "make this a doc comment by removing one `/`"
+ } else {
+ "turn these into doc comments by removing one `/`"
+ };
+
+ diag.multipart_suggestion(
+ msg,
+ bad_comments
+ .into_iter()
+ // It's a little unfortunate but the span includes the `\n` yet the contents
+ // do not, so we must add it back. If some codebase uses `\r\n` instead they
+ // will need normalization but it should be fine
+ .map(|(span, c)| (span, c.replacen("////", "///", 1) + "\n"))
+ .collect(),
+ Applicability::MachineApplicable,
+ );
+ },
+ );
+ }
+ }
+}
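Reading the loop above, a line is only reported when it starts with exactly four slashes: the `chars().nth(4)` check skips lines whose fifth character is another `/` or a `!`, so longer separator-style comments are left alone. A small sketch of what would and would not be flagged (item names are illustrative):

```rust
//// This comment is flagged: it looks like a doc comment but is not one;
//// the suggestion removes one `/` from each line.
pub fn looks_documented() {}

///// Five or more slashes are treated as a deliberate separator and ignored.
pub fn separator_style() {}

/// A real doc comment; nothing to report.
pub fn actually_documented() {}

fn main() {
    looks_documented();
    separator_style();
    actually_documented();
}
```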
diff --git a/src/tools/clippy/clippy_lints/src/from_over_into.rs b/src/tools/clippy/clippy_lints/src/from_over_into.rs
index 92d67ef35..2b899e21e 100644
--- a/src/tools/clippy/clippy_lints/src/from_over_into.rs
+++ b/src/tools/clippy/clippy_lints/src/from_over_into.rs
@@ -10,7 +10,8 @@ use rustc_hir::{
TyKind,
};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_middle::{hir::nested_filter::OnlyBodies, ty};
+use rustc_middle::hir::nested_filter::OnlyBodies;
+use rustc_middle::ty;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::symbol::{kw, sym};
use rustc_span::{Span, Symbol};
@@ -76,9 +77,10 @@ impl<'tcx> LateLintPass<'tcx> for FromOverInto {
&& let Some(into_trait_seg) = hir_trait_ref.path.segments.last()
// `impl Into<target_ty> for self_ty`
&& let Some(GenericArgs { args: [GenericArg::Type(target_ty)], .. }) = into_trait_seg.args
- && let Some(middle_trait_ref) = cx.tcx.impl_trait_ref(item.owner_id).map(ty::EarlyBinder::subst_identity)
+ && let Some(middle_trait_ref) = cx.tcx.impl_trait_ref(item.owner_id)
+ .map(ty::EarlyBinder::instantiate_identity)
&& cx.tcx.is_diagnostic_item(sym::Into, middle_trait_ref.def_id)
- && !matches!(middle_trait_ref.substs.type_at(1).kind(), ty::Alias(ty::Opaque, _))
+ && !matches!(middle_trait_ref.args.type_at(1).kind(), ty::Alias(ty::Opaque, _))
{
span_lint_and_then(
cx,
@@ -163,10 +165,14 @@ fn convert_to_from(
return None;
}
let impl_item = cx.tcx.hir().impl_item(impl_item_ref.id);
- let ImplItemKind::Fn(ref sig, body_id) = impl_item.kind else { return None };
+ let ImplItemKind::Fn(ref sig, body_id) = impl_item.kind else {
+ return None;
+ };
let body = cx.tcx.hir().body(body_id);
let [input] = body.params else { return None };
- let PatKind::Binding(.., self_ident, None) = input.pat.kind else { return None };
+ let PatKind::Binding(.., self_ident, None) = input.pat.kind else {
+ return None;
+ };
let from = snippet_opt(cx, self_ty.span)?;
let into = snippet_opt(cx, target_ty.span)?;
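`convert_to_from` above builds the machine-applicable rewrite from an `Into` impl to the equivalent `From` impl (note how it captures the binding used for `self` and the source/target type snippets). As a reminder of what the `from_over_into` lint asks for, a compilable sketch with illustrative names; the `Into` impl is commented out since the two impls would otherwise conflict:

```rust
struct Wrapper(String);

// Flagged by `from_over_into`: implementing `Into` directly.
// impl Into<String> for Wrapper {
//     fn into(self) -> String {
//         self.0
//     }
// }

// Preferred: implement `From`, which provides the `Into` impl for free.
impl From<Wrapper> for String {
    fn from(val: Wrapper) -> String {
        val.0
    }
}

fn main() {
    let s: String = Wrapper("hello".to_owned()).into();
    assert_eq!(s, "hello");
}
```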
diff --git a/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs b/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs
index 096508dc4..5e859d97c 100644
--- a/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs
+++ b/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs
@@ -4,8 +4,7 @@ use clippy_utils::{match_def_path, path_def_id, paths};
use rustc_hir::def_id::DefId;
use rustc_hir::{Expr, ExprKind, QPath};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_middle::ty::RawPtr;
-use rustc_middle::ty::TypeAndMut;
+use rustc_middle::ty::{RawPtr, TypeAndMut};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::sym;
diff --git a/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs b/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs
index d3d0d91c1..597fca888 100644
--- a/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs
+++ b/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs
@@ -1,6 +1,8 @@
-use clippy_utils::{diagnostics::span_lint_and_then, is_in_test_function};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::is_in_test_function;
-use rustc_hir::{intravisit::FnKind, Body, HirId};
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::{Body, HirId};
use rustc_lint::LateContext;
use rustc_span::Span;
diff --git a/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs b/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs
index b244b9133..18f7368da 100644
--- a/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs
+++ b/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs
@@ -1,7 +1,8 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet;
use rustc_errors::Applicability;
-use rustc_hir::{intravisit::FnKind, Body, ExprKind, FnDecl, ImplicitSelfKind, Unsafety};
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::{Body, ExprKind, FnDecl, ImplicitSelfKind, Unsafety};
use rustc_lint::LateContext;
use rustc_middle::ty;
use rustc_span::Span;
@@ -12,8 +13,8 @@ use super::MISNAMED_GETTERS;
pub fn check_fn(cx: &LateContext<'_>, kind: FnKind<'_>, decl: &FnDecl<'_>, body: &Body<'_>, span: Span) {
let FnKind::Method(ref ident, sig) = kind else {
- return;
- };
+ return;
+ };
// Takes only &(mut) self
if decl.inputs.len() != 1 {
@@ -25,8 +26,8 @@ pub fn check_fn(cx: &LateContext<'_>, kind: FnKind<'_>, decl: &FnDecl<'_>, body:
let name = match decl.implicit_self {
ImplicitSelfKind::MutRef => {
let Some(name) = name.strip_suffix("_mut") else {
- return;
- };
+ return;
+ };
name
},
ImplicitSelfKind::Imm | ImplicitSelfKind::Mut | ImplicitSelfKind::ImmRef => name,
@@ -76,7 +77,7 @@ pub fn check_fn(cx: &LateContext<'_>, kind: FnKind<'_>, decl: &FnDecl<'_>, body:
for adjusted_type in iter::once(typeck_results.expr_ty(self_data))
.chain(typeck_results.expr_adjustments(self_data).iter().map(|adj| adj.target))
{
- let ty::Adt(def,_) = adjusted_type.kind() else {
+ let ty::Adt(def, _) = adjusted_type.kind() else {
continue;
};
@@ -91,13 +92,15 @@ pub fn check_fn(cx: &LateContext<'_>, kind: FnKind<'_>, decl: &FnDecl<'_>, body:
}
let Some(used_field) = used_field else {
- // Can happen if the field access is a tuple. We don't lint those because the getter name could not start with a number.
+ // Can happen if the field access is a tuple. We don't lint those because the getter name could not
+ // start with a number.
return;
};
let Some(correct_field) = correct_field else {
// There is no field corresponding to the getter name.
- // FIXME: This can be a false positive if the correct field is reachable through deeper autodereferences than used_field is
+ // FIXME: This can be a false positive if the correct field is reachable through deeper
+ // autodereferences than used_field is
return;
};
diff --git a/src/tools/clippy/clippy_lints/src/functions/must_use.rs b/src/tools/clippy/clippy_lints/src/functions/must_use.rs
index d0ad26282..57df5683c 100644
--- a/src/tools/clippy/clippy_lints/src/functions/must_use.rs
+++ b/src/tools/clippy/clippy_lints/src/functions/must_use.rs
@@ -1,14 +1,13 @@
use hir::FnSig;
use rustc_ast::ast::Attribute;
use rustc_errors::Applicability;
+use rustc_hir::def::Res;
use rustc_hir::def_id::DefIdSet;
-use rustc_hir::{self as hir, def::Res, QPath};
+use rustc_hir::{self as hir, QPath};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::{LateContext, LintContext};
-use rustc_middle::{
- lint::in_external_macro,
- ty::{self, Ty},
-};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::{self, Ty};
use rustc_span::{sym, Span, Symbol};
use clippy_utils::attrs::is_proc_macro;
@@ -198,14 +197,14 @@ fn is_mutable_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, tys: &mut DefIdSet)
match *ty.kind() {
// primitive types are never mutable
ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Str => false,
- ty::Adt(adt, substs) => {
+ ty::Adt(adt, args) => {
tys.insert(adt.did()) && !ty.is_freeze(cx.tcx, cx.param_env)
|| KNOWN_WRAPPER_TYS
.iter()
.any(|&sym| cx.tcx.is_diagnostic_item(sym, adt.did()))
- && substs.types().any(|ty| is_mutable_ty(cx, ty, tys))
+ && args.types().any(|ty| is_mutable_ty(cx, ty, tys))
},
- ty::Tuple(substs) => substs.iter().any(|ty| is_mutable_ty(cx, ty, tys)),
+ ty::Tuple(args) => args.iter().any(|ty| is_mutable_ty(cx, ty, tys)),
ty::Array(ty, _) | ty::Slice(ty) => is_mutable_ty(cx, ty, tys),
ty::RawPtr(ty::TypeAndMut { ty, mutbl }) | ty::Ref(_, ty, mutbl) => {
mutbl == hir::Mutability::Mut || is_mutable_ty(cx, ty, tys)
@@ -222,7 +221,7 @@ fn is_mutated_static(e: &hir::Expr<'_>) -> bool {
match e.kind {
Path(QPath::Resolved(_, path)) => !matches!(path.res, Res::Local(_)),
Path(_) => true,
- Field(inner, _) | Index(inner, _) => is_mutated_static(inner),
+ Field(inner, _) | Index(inner, _, _) => is_mutated_static(inner),
_ => false,
}
}
diff --git a/src/tools/clippy/clippy_lints/src/functions/result.rs b/src/tools/clippy/clippy_lints/src/functions/result.rs
index fa2a9b30c..90fc0d4f6 100644
--- a/src/tools/clippy/clippy_lints/src/functions/result.rs
+++ b/src/tools/clippy/clippy_lints/src/functions/result.rs
@@ -21,11 +21,11 @@ fn result_err_ty<'tcx>(
) -> Option<(&'tcx hir::Ty<'tcx>, Ty<'tcx>)> {
if !in_external_macro(cx.sess(), item_span)
&& let hir::FnRetTy::Return(hir_ty) = decl.output
- && let ty = cx.tcx.erase_late_bound_regions(cx.tcx.fn_sig(id).subst_identity().output())
+ && let ty = cx.tcx.erase_late_bound_regions(cx.tcx.fn_sig(id).instantiate_identity().output())
&& is_type_diagnostic_item(cx, ty, sym::Result)
- && let ty::Adt(_, substs) = ty.kind()
+ && let ty::Adt(_, args) = ty.kind()
{
- let err_ty = substs.type_at(1);
+ let err_ty = args.type_at(1);
Some((hir_ty, err_ty))
} else {
None
diff --git a/src/tools/clippy/clippy_lints/src/functions/too_many_lines.rs b/src/tools/clippy/clippy_lints/src/functions/too_many_lines.rs
index bd473ac7e..34f1bf3b2 100644
--- a/src/tools/clippy/clippy_lints/src/functions/too_many_lines.rs
+++ b/src/tools/clippy/clippy_lints/src/functions/too_many_lines.rs
@@ -23,7 +23,7 @@ pub(super) fn check_fn(
}
let Some(code_snippet) = snippet_opt(cx, body.value.span) else {
- return
+ return;
};
let mut line_count: u64 = 0;
let mut in_comment = false;
diff --git a/src/tools/clippy/clippy_lints/src/future_not_send.rs b/src/tools/clippy/clippy_lints/src/future_not_send.rs
index 818ebd113..621415c88 100644
--- a/src/tools/clippy/clippy_lints/src/future_not_send.rs
+++ b/src/tools/clippy/clippy_lints/src/future_not_send.rs
@@ -63,10 +63,10 @@ impl<'tcx> LateLintPass<'tcx> for FutureNotSend {
return;
}
let ret_ty = return_ty(cx, cx.tcx.hir().local_def_id_to_hir_id(fn_def_id).expect_owner());
- if let ty::Alias(ty::Opaque, AliasTy { def_id, substs, .. }) = *ret_ty.kind() {
+ if let ty::Alias(ty::Opaque, AliasTy { def_id, args, .. }) = *ret_ty.kind() {
let preds = cx.tcx.explicit_item_bounds(def_id);
let mut is_future = false;
- for (p, _span) in preds.subst_iter_copied(cx.tcx, substs) {
+ for (p, _span) in preds.iter_instantiated_copied(cx.tcx, args) {
if let Some(trait_pred) = p.as_trait_clause() {
if Some(trait_pred.skip_binder().trait_ref.def_id) == cx.tcx.lang_items().future_trait() {
is_future = true;
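The change above only switches from `subst_iter_copied` to `iter_instantiated_copied` when walking the opaque return type's bounds; the lint still asks whether the `impl Future` returned by an `async fn` is `Send`. A minimal sketch of code it would flag, assuming the usual behaviour of `future_not_send`:

```rust
use std::rc::Rc;

// Flagged: the returned future captures an `Rc`, which is `!Send`,
// so the future itself cannot be `Send`.
async fn not_send(shared: Rc<u32>) -> u32 {
    *shared
}

// Fine: everything captured by the future is `Send`.
async fn is_send(value: u32) -> u32 {
    value
}

fn main() {
    // The futures are only constructed, not awaited; this is a compile-time sketch.
    let _ = not_send(Rc::new(1));
    let _ = is_send(1);
}
```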
diff --git a/src/tools/clippy/clippy_lints/src/if_let_mutex.rs b/src/tools/clippy/clippy_lints/src/if_let_mutex.rs
index 9ea8c494c..e614a8f69 100644
--- a/src/tools/clippy/clippy_lints/src/if_let_mutex.rs
+++ b/src/tools/clippy/clippy_lints/src/if_let_mutex.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_then;
-use clippy_utils::higher;
use clippy_utils::ty::is_type_diagnostic_item;
-use clippy_utils::SpanlessEq;
+use clippy_utils::{higher, SpanlessEq};
use if_chain::if_chain;
use rustc_errors::Diagnostic;
use rustc_hir::intravisit::{self as visit, Visitor};
diff --git a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs
index 725bd3d54..ab6ad3f3b 100644
--- a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs
+++ b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs
@@ -119,7 +119,13 @@ impl<'tcx> LateLintPass<'tcx> for IfThenSomeElseNone {
fn stmts_contains_early_return(stmts: &[Stmt<'_>]) -> bool {
stmts.iter().any(|stmt| {
- let Stmt { kind: StmtKind::Semi(e), .. } = stmt else { return false };
+ let Stmt {
+ kind: StmtKind::Semi(e),
+ ..
+ } = stmt
+ else {
+ return false;
+ };
contains_return(e)
})
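For reference, the lint this file implements suggests `bool::then` (or `then_some` when the value has no side effects) in place of an `if`/`else` that produces `Some`/`None`; the helper reformatted above exists so that branches containing an early `return` are not rewritten. A compilable sketch with made-up values:

```rust
fn main() {
    let temperature = 21;

    // Flagged by `if_then_some_else_none`.
    let comfy = if temperature > 18 { Some(temperature) } else { None };

    // Suggested style.
    let comfy_suggested = (temperature > 18).then_some(temperature);

    assert_eq!(comfy, comfy_suggested);
}
```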
diff --git a/src/tools/clippy/clippy_lints/src/ignored_unit_patterns.rs b/src/tools/clippy/clippy_lints/src/ignored_unit_patterns.rs
new file mode 100644
index 000000000..c635120b8
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/ignored_unit_patterns.rs
@@ -0,0 +1,52 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use hir::PatKind;
+use rustc_errors::Applicability;
+use rustc_hir as hir;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `_` in patterns of type `()`.
+ ///
+ /// ### Why is this bad?
+ /// Matching with `()` explicitly instead of `_` makes it clear
+ /// that the pattern contains no data. It also catches a type
+ /// change that `_` would silently ignore.
+ ///
+ /// ### Example
+ /// ```rust
+ /// match std::fs::create_dir("tmp-work-dir") {
+ /// Ok(_) => println!("Working directory created"),
+ /// Err(s) => eprintln!("Could not create directory: {s}"),
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// match std::fs::create_dir("tmp-work-dir") {
+ /// Ok(()) => println!("Working directory created"),
+ /// Err(s) => eprintln!("Could not create directory: {s}"),
+ /// }
+ /// ```
+ #[clippy::version = "1.73.0"]
+ pub IGNORED_UNIT_PATTERNS,
+ pedantic,
+ "suggest replacing `_` by `()` in patterns where appropriate"
+}
+declare_lint_pass!(IgnoredUnitPatterns => [IGNORED_UNIT_PATTERNS]);
+
+impl<'tcx> LateLintPass<'tcx> for IgnoredUnitPatterns {
+ fn check_pat(&mut self, cx: &LateContext<'tcx>, pat: &'tcx hir::Pat<'tcx>) {
+ if matches!(pat.kind, PatKind::Wild) && cx.typeck_results().pat_ty(pat).is_unit() {
+ span_lint_and_sugg(
+ cx,
+ IGNORED_UNIT_PATTERNS,
+ pat.span,
+ "matching over `()` is more explicit",
+ "use `()` instead of `_`",
+ String::from("()"),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/implicit_return.rs b/src/tools/clippy/clippy_lints/src/implicit_return.rs
index 372b6ead3..a6b035d51 100644
--- a/src/tools/clippy/clippy_lints/src/implicit_return.rs
+++ b/src/tools/clippy/clippy_lints/src/implicit_return.rs
@@ -1,9 +1,7 @@
-use clippy_utils::{
- diagnostics::span_lint_hir_and_then,
- get_async_fn_body, is_async_fn,
- source::{snippet_with_applicability, snippet_with_context, walk_span_to_context},
- visitors::for_each_expr,
-};
+use clippy_utils::diagnostics::span_lint_hir_and_then;
+use clippy_utils::source::{snippet_with_applicability, snippet_with_context, walk_span_to_context};
+use clippy_utils::visitors::for_each_expr;
+use clippy_utils::{get_async_fn_body, is_async_fn};
use core::ops::ControlFlow;
use rustc_errors::Applicability;
use rustc_hir::intravisit::FnKind;
diff --git a/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs b/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs
index 1e99b6faa..b99d45446 100644
--- a/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs
+++ b/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs
@@ -102,7 +102,7 @@ impl<'tcx> LateLintPass<'tcx> for ImplicitSaturatingSub {
if let Some(const_id) = cx.typeck_results().type_dependent_def_id(cond_num_val.hir_id);
if let Some(impl_id) = cx.tcx.impl_of_method(const_id);
if let None = cx.tcx.impl_trait_ref(impl_id); // An inherent impl
- if cx.tcx.type_of(impl_id).subst_identity().is_integral();
+ if cx.tcx.type_of(impl_id).instantiate_identity().is_integral();
then {
print_lint_and_sugg(cx, var_name, expr)
}
@@ -115,7 +115,7 @@ impl<'tcx> LateLintPass<'tcx> for ImplicitSaturatingSub {
if let Some(func_id) = cx.typeck_results().type_dependent_def_id(func.hir_id);
if let Some(impl_id) = cx.tcx.impl_of_method(func_id);
if let None = cx.tcx.impl_trait_ref(impl_id); // An inherent impl
- if cx.tcx.type_of(impl_id).subst_identity().is_integral();
+ if cx.tcx.type_of(impl_id).instantiate_identity().is_integral();
then {
print_lint_and_sugg(cx, var_name, expr)
}
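Both hunks above only swap `subst_identity` for `instantiate_identity` when checking that the inherent impl is on an integral type; the lint's behaviour is unchanged. As a reminder, `implicit_saturating_sub` targets the manual saturating pattern:

```rust
fn main() {
    let mut remaining: u32 = 0;

    // Flagged by `implicit_saturating_sub`: a manual underflow check
    // guarding a subtraction.
    if remaining > 0 {
        remaining -= 1;
    }

    // Suggested replacement.
    remaining = remaining.saturating_sub(1);

    assert_eq!(remaining, 0);
}
```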
diff --git a/src/tools/clippy/clippy_lints/src/incorrect_impls.rs b/src/tools/clippy/clippy_lints/src/incorrect_impls.rs
index 7b95116ee..3c59b839a 100644
--- a/src/tools/clippy/clippy_lints/src/incorrect_impls.rs
+++ b/src/tools/clippy/clippy_lints/src/incorrect_impls.rs
@@ -1,11 +1,15 @@
-use clippy_utils::{diagnostics::span_lint_and_sugg, get_parent_node, last_path_segment, ty::implements_trait};
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::paths::ORD_CMP;
+use clippy_utils::ty::implements_trait;
+use clippy_utils::{get_parent_node, is_res_lang_ctor, last_path_segment, match_def_path, path_res, std_or_core};
use rustc_errors::Applicability;
-use rustc_hir::{ExprKind, ImplItem, ImplItemKind, ItemKind, Node, UnOp};
-use rustc_hir_analysis::hir_ty_to_ty;
+use rustc_hir::def_id::LocalDefId;
+use rustc_hir::{Expr, ExprKind, ImplItem, ImplItemKind, ItemKind, LangItem, Node, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::EarlyBinder;
use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::{sym, symbol};
+use rustc_span::sym;
+use rustc_span::symbol::kw;
declare_clippy_lint! {
/// ### What it does
@@ -46,25 +50,84 @@ declare_clippy_lint! {
correctness,
"manual implementation of `Clone` on a `Copy` type"
}
-declare_lint_pass!(IncorrectImpls => [INCORRECT_CLONE_IMPL_ON_COPY_TYPE]);
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for manual implementations of both `PartialOrd` and `Ord` when only `Ord` is
+ /// necessary.
+ ///
+ /// ### Why is this bad?
+ /// If both `PartialOrd` and `Ord` are implemented, they must agree. This is commonly done by
+ /// wrapping the result of `cmp` in `Some` for `partial_cmp`. Not doing this may silently
+ /// introduce an error upon refactoring.
+ ///
+ /// ### Known issues
+ /// Code that calls the `.into()` method instead will be flagged as incorrect, despite `.into()`
+ /// wrapping it in `Some`.
+ ///
+ /// ### Limitations
+ /// Will not lint if `Self` and `Rhs` do not have the same type.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # use std::cmp::Ordering;
+ /// #[derive(Eq, PartialEq)]
+ /// struct A(u32);
+ ///
+ /// impl Ord for A {
+ /// fn cmp(&self, other: &Self) -> Ordering {
+ /// // ...
+ /// # todo!();
+ /// }
+ /// }
+ ///
+ /// impl PartialOrd for A {
+ /// fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ /// // ...
+ /// # todo!();
+ /// }
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # use std::cmp::Ordering;
+ /// #[derive(Eq, PartialEq)]
+ /// struct A(u32);
+ ///
+ /// impl Ord for A {
+ /// fn cmp(&self, other: &Self) -> Ordering {
+ /// // ...
+ /// # todo!();
+ /// }
+ /// }
+ ///
+ /// impl PartialOrd for A {
+ /// fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ /// Some(self.cmp(other))
+ /// }
+ /// }
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub INCORRECT_PARTIAL_ORD_IMPL_ON_ORD_TYPE,
+ correctness,
+ "manual implementation of `PartialOrd` when `Ord` is already implemented"
+}
+declare_lint_pass!(IncorrectImpls => [INCORRECT_CLONE_IMPL_ON_COPY_TYPE, INCORRECT_PARTIAL_ORD_IMPL_ON_ORD_TYPE]);
impl LateLintPass<'_> for IncorrectImpls {
- #[expect(clippy::needless_return)]
+ #[expect(clippy::too_many_lines)]
fn check_impl_item(&mut self, cx: &LateContext<'_>, impl_item: &ImplItem<'_>) {
- let node = get_parent_node(cx.tcx, impl_item.hir_id());
- let Some(Node::Item(item)) = node else {
- return;
- };
- let ItemKind::Impl(imp) = item.kind else {
+ let Some(Node::Item(item)) = get_parent_node(cx.tcx, impl_item.hir_id()) else {
return;
};
let Some(trait_impl) = cx.tcx.impl_trait_ref(item.owner_id).map(EarlyBinder::skip_binder) else {
return;
};
- let trait_impl_def_id = trait_impl.def_id;
if cx.tcx.is_automatically_derived(item.owner_id.to_def_id()) {
return;
}
+ let ItemKind::Impl(_) = item.kind else {
+ return;
+ };
let ImplItemKind::Fn(_, impl_item_id) = cx.tcx.hir().impl_item(impl_item.impl_item_id()).kind else {
return;
};
@@ -72,15 +135,12 @@ impl LateLintPass<'_> for IncorrectImpls {
let ExprKind::Block(block, ..) = body.value.kind else {
return;
};
- // Above is duplicated from the `duplicate_manual_partial_ord_impl` branch.
- // Remove it while solving conflicts once that PR is merged.
- // Actual implementation; remove this comment once aforementioned PR is merged
- if cx.tcx.is_diagnostic_item(sym::Clone, trait_impl_def_id)
+ if cx.tcx.is_diagnostic_item(sym::Clone, trait_impl.def_id)
&& let Some(copy_def_id) = cx.tcx.get_diagnostic_item(sym::Copy)
&& implements_trait(
cx,
- hir_ty_to_ty(cx.tcx, imp.self_ty),
+ trait_impl.self_ty(),
copy_def_id,
&[],
)
@@ -88,9 +148,9 @@ impl LateLintPass<'_> for IncorrectImpls {
if impl_item.ident.name == sym::clone {
if block.stmts.is_empty()
&& let Some(expr) = block.expr
- && let ExprKind::Unary(UnOp::Deref, inner) = expr.kind
- && let ExprKind::Path(qpath) = inner.kind
- && last_path_segment(&qpath).ident.name == symbol::kw::SelfLower
+ && let ExprKind::Unary(UnOp::Deref, deref) = expr.kind
+ && let ExprKind::Path(qpath) = deref.kind
+ && last_path_segment(&qpath).ident.name == kw::SelfLower
{} else {
span_lint_and_sugg(
cx,
@@ -112,7 +172,7 @@ impl LateLintPass<'_> for IncorrectImpls {
INCORRECT_CLONE_IMPL_ON_COPY_TYPE,
impl_item.span,
"incorrect implementation of `clone_from` on a `Copy` type",
- "remove this",
+ "remove it",
String::new(),
Applicability::MaybeIncorrect,
);
@@ -120,5 +180,116 @@ impl LateLintPass<'_> for IncorrectImpls {
return;
}
}
+
+ if cx.tcx.is_diagnostic_item(sym::PartialOrd, trait_impl.def_id)
+ && impl_item.ident.name == sym::partial_cmp
+ && let Some(ord_def_id) = cx
+ .tcx
+ .diagnostic_items(trait_impl.def_id.krate)
+ .name_to_id
+ .get(&sym::Ord)
+ && implements_trait(
+ cx,
+ trait_impl.self_ty(),
+ *ord_def_id,
+ &[],
+ )
+ {
+ // Whether the `cmp` call in the suggestion likely needs to be fully qualified
+ // (like `std::cmp::Ord::cmp`). It's unfortunate we must track this here, but we can't
+ // access `cmp_expr` in the suggestion closure without major changes, as we lint in `else`.
+ let mut needs_fully_qualified = false;
+
+ if block.stmts.is_empty()
+ && let Some(expr) = block.expr
+ && let ExprKind::Call(
+ Expr {
+ kind: ExprKind::Path(some_path),
+ hir_id: some_hir_id,
+ ..
+ },
+ [cmp_expr],
+ ) = expr.kind
+ && is_res_lang_ctor(cx, cx.qpath_res(some_path, *some_hir_id), LangItem::OptionSome)
+ // Fix #11178, allow `Self::cmp(self, ..)` too
+ && self_cmp_call(cx, cmp_expr, impl_item.owner_id.def_id, &mut needs_fully_qualified)
+ {} else {
+ // If `Self` and `Rhs` are not the same type, bail. This makes creating a valid
+ // suggestion far more complex.
+ if let [lhs, rhs, ..] = trait_impl.args.as_slice() && lhs != rhs {
+ return;
+ }
+
+ span_lint_and_then(
+ cx,
+ INCORRECT_PARTIAL_ORD_IMPL_ON_ORD_TYPE,
+ item.span,
+ "incorrect implementation of `partial_cmp` on an `Ord` type",
+ |diag| {
+ let [_, other] = body.params else {
+ return;
+ };
+ let Some(std_or_core) = std_or_core(cx) else {
+ return;
+ };
+
+ let suggs = match (other.pat.simple_ident(), needs_fully_qualified) {
+ (Some(other_ident), true) => vec![(
+ block.span,
+ format!("{{ Some({std_or_core}::cmp::Ord::cmp(self, {})) }}", other_ident.name),
+ )],
+ (Some(other_ident), false) => {
+ vec![(block.span, format!("{{ Some(self.cmp({})) }}", other_ident.name))]
+ },
+ (None, true) => vec![
+ (
+ block.span,
+ format!("{{ Some({std_or_core}::cmp::Ord::cmp(self, other)) }}"),
+ ),
+ (other.pat.span, "other".to_owned()),
+ ],
+ (None, false) => vec![
+ (block.span, "{ Some(self.cmp(other)) }".to_owned()),
+ (other.pat.span, "other".to_owned()),
+ ],
+ };
+
+ diag.multipart_suggestion(
+ "change this to",
+ suggs,
+ Applicability::Unspecified,
+ );
+ }
+ );
+ }
+ }
+ }
+}
+
+/// Returns whether this is any of `self.cmp(..)`, `Self::cmp(self, ..)` or `Ord::cmp(self, ..)`.
+fn self_cmp_call<'tcx>(
+ cx: &LateContext<'tcx>,
+ cmp_expr: &'tcx Expr<'tcx>,
+ def_id: LocalDefId,
+ needs_fully_qualified: &mut bool,
+) -> bool {
+ match cmp_expr.kind {
+ ExprKind::Call(path, [_self, _other]) => path_res(cx, path)
+ .opt_def_id()
+ .is_some_and(|def_id| match_def_path(cx, def_id, &ORD_CMP)),
+ ExprKind::MethodCall(_, _, [_other], ..) => {
+ // We can set this to true unconditionally: if this is a `MethodCall` and the caller ends up
+ // in its `else` branch, the call must be to a method named `cmp` that isn't `Ord::cmp`
+ *needs_fully_qualified = true;
+
+ // It's a bit annoying, but `typeck_results` only gives us results for the current body (and
+ // here we have none), not for an arbitrary `LocalDefId`, so we must call the `typeck` query
+ // directly to avoid an immediate ICE
+ cx.tcx
+ .typeck(def_id)
+ .type_dependent_def_id(cmp_expr.hir_id)
+ .is_some_and(|def_id| match_def_path(cx, def_id, &ORD_CMP))
+ },
+ _ => false,
}
}
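
The fully qualified branch above is easiest to see with a minimal sketch (illustration only, not part of the diff): when an inherent `cmp` shadows `Ord::cmp`, suggesting `Some(self.cmp(other))` would keep calling the wrong method, so the suggestion uses the fully qualified form instead.

use std::cmp::Ordering;

#[derive(PartialEq, Eq)]
struct A(u32);

impl A {
    // Inherent `cmp` that shadows `Ord::cmp` under method-call syntax.
    fn cmp(&self, other: &Self) -> Ordering {
        other.0.cmp(&self.0)
    }
}

impl Ord for A {
    fn cmp(&self, other: &Self) -> Ordering {
        self.0.cmp(&other.0)
    }
}

impl PartialOrd for A {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // `self.cmp(other)` resolves to the inherent method, not `Ord::cmp`, so
        // `self_cmp_call` returns false, `needs_fully_qualified` is set, and the
        // suggested replacement is `{ Some(std::cmp::Ord::cmp(self, other)) }`.
        Some(self.cmp(other))
    }
}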
diff --git a/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs b/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs
index 7a269e98f..f507f45d5 100644
--- a/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs
+++ b/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs
@@ -13,7 +13,8 @@ use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter;
use rustc_middle::ty;
use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::{symbol::Ident, Span};
+use rustc_span::symbol::Ident;
+use rustc_span::Span;
declare_clippy_lint! {
/// ### What it does
@@ -253,7 +254,7 @@ impl<'a, 'tcx> Visitor<'tcx> for SliceIndexLintingVisitor<'a, 'tcx> {
// Checking for slice indexing
let parent_id = map.parent_id(expr.hir_id);
if let Some(hir::Node::Expr(parent_expr)) = map.find(parent_id);
- if let hir::ExprKind::Index(_, index_expr) = parent_expr.kind;
+ if let hir::ExprKind::Index(_, index_expr, _) = parent_expr.kind;
if let Some(Constant::Int(index_value)) = constant(cx, cx.typeck_results(), index_expr);
if let Ok(index_value) = index_value.try_into();
if index_value < max_suggested_slice;
diff --git a/src/tools/clippy/clippy_lints/src/indexing_slicing.rs b/src/tools/clippy/clippy_lints/src/indexing_slicing.rs
index 22c14d9b0..4f4f57177 100644
--- a/src/tools/clippy/clippy_lints/src/indexing_slicing.rs
+++ b/src/tools/clippy/clippy_lints/src/indexing_slicing.rs
@@ -103,7 +103,7 @@ impl<'tcx> LateLintPass<'tcx> for IndexingSlicing {
return;
}
- if let ExprKind::Index(array, index) = &expr.kind {
+ if let ExprKind::Index(array, index, _) = &expr.kind {
let note = "the suggestion might not be applicable in constant blocks";
let ty = cx.typeck_results().expr_ty(array).peel_refs();
if let Some(range) = higher::Range::hir(index) {
diff --git a/src/tools/clippy/clippy_lints/src/inherent_impl.rs b/src/tools/clippy/clippy_lints/src/inherent_impl.rs
index 7c41699f3..3d1113ff9 100644
--- a/src/tools/clippy/clippy_lints/src/inherent_impl.rs
+++ b/src/tools/clippy/clippy_lints/src/inherent_impl.rs
@@ -3,7 +3,8 @@
use clippy_utils::diagnostics::span_lint_and_note;
use clippy_utils::is_lint_allowed;
use rustc_data_structures::fx::FxHashMap;
-use rustc_hir::{def_id::LocalDefId, Item, ItemKind, Node};
+use rustc_hir::def_id::LocalDefId;
+use rustc_hir::{Item, ItemKind, Node};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::Span;
@@ -66,7 +67,7 @@ impl<'tcx> LateLintPass<'tcx> for MultipleInherentImpl {
)
}) {
for impl_id in impl_ids.iter().map(|id| id.expect_local()) {
- let impl_ty = cx.tcx.type_of(impl_id).subst_identity();
+ let impl_ty = cx.tcx.type_of(impl_id).instantiate_identity();
match type_map.entry(impl_ty) {
Entry::Vacant(e) => {
// Store the id for the first impl block of this type. The span is retrieved lazily.
diff --git a/src/tools/clippy/clippy_lints/src/inherent_to_string.rs b/src/tools/clippy/clippy_lints/src/inherent_to_string.rs
index d43e5cc9b..bc4ec33b7 100644
--- a/src/tools/clippy/clippy_lints/src/inherent_to_string.rs
+++ b/src/tools/clippy/clippy_lints/src/inherent_to_string.rs
@@ -1,11 +1,11 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::ty::{implements_trait, is_type_lang_item};
use clippy_utils::{return_ty, trait_ref_of_method};
-use if_chain::if_chain;
-use rustc_hir::{GenericParamKind, ImplItem, ImplItemKind, LangItem};
+use rustc_hir::{GenericParamKind, ImplItem, ImplItemKind, LangItem, Unsafety};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::sym;
+use rustc_target::spec::abi::Abi;
declare_clippy_lint! {
/// ### What it does
@@ -95,24 +95,23 @@ impl<'tcx> LateLintPass<'tcx> for InherentToString {
return;
}
- if_chain! {
- // Check if item is a method, called to_string and has a parameter 'self'
- if let ImplItemKind::Fn(ref signature, _) = impl_item.kind;
- if impl_item.ident.name == sym::to_string;
- let decl = &signature.decl;
- if decl.implicit_self.has_implicit_self();
- if decl.inputs.len() == 1;
- if impl_item.generics.params.iter().all(|p| matches!(p.kind, GenericParamKind::Lifetime { .. }));
-
+ // Check if item is a method called `to_string` and has a parameter 'self'
+ if let ImplItemKind::Fn(ref signature, _) = impl_item.kind
+ // #11201
+ && let header = signature.header
+ && header.unsafety == Unsafety::Normal
+ && header.abi == Abi::Rust
+ && impl_item.ident.name == sym::to_string
+ && let decl = signature.decl
+ && decl.implicit_self.has_implicit_self()
+ && decl.inputs.len() == 1
+ && impl_item.generics.params.iter().all(|p| matches!(p.kind, GenericParamKind::Lifetime { .. }))
// Check if return type is String
- if is_type_lang_item(cx, return_ty(cx, impl_item.owner_id), LangItem::String);
-
+ && is_type_lang_item(cx, return_ty(cx, impl_item.owner_id), LangItem::String)
// Filters instances of to_string which are required by a trait
- if trait_ref_of_method(cx, impl_item.owner_id.def_id).is_none();
-
- then {
- show_lint(cx, impl_item);
- }
+ && trait_ref_of_method(cx, impl_item.owner_id.def_id).is_none()
+ {
+ show_lint(cx, impl_item);
}
}
}
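
A minimal sketch of what the new `Unsafety`/`Abi` check (added for #11201) now excludes from `inherent_to_string`; illustration only, not part of the diff.

struct S;

impl S {
    // With the added `header.unsafety`/`header.abi` check, this method is skipped by the lint:
    // it is not a plain safe Rust-ABI `to_string`.
    unsafe fn to_string(&self) -> String {
        String::new()
    }
}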
diff --git a/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs b/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs
index 7e1548531..b00fa104f 100644
--- a/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs
+++ b/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs
@@ -50,7 +50,7 @@ impl<'tcx> LateLintPass<'tcx> for NumberedFields {
&& fields
.iter()
.all(|f| f.ident.as_str().as_bytes().iter().all(u8::is_ascii_digit))
- && !matches!(cx.qpath_res(path, e.hir_id), Res::Def(DefKind::TyAlias, ..))
+ && !matches!(cx.qpath_res(path, e.hir_id), Res::Def(DefKind::TyAlias { .. }, ..))
{
let expr_spans = fields
.iter()
@@ -71,7 +71,7 @@ impl<'tcx> LateLintPass<'tcx> for NumberedFields {
INIT_NUMBERED_FIELDS,
e.span,
"used a field initializer for a tuple struct",
- "try this instead",
+ "try",
snippet,
appl,
);
diff --git a/src/tools/clippy/clippy_lints/src/instant_subtraction.rs b/src/tools/clippy/clippy_lints/src/instant_subtraction.rs
index 34e999158..8df7dfb8b 100644
--- a/src/tools/clippy/clippy_lints/src/instant_subtraction.rs
+++ b/src/tools/clippy/clippy_lints/src/instant_subtraction.rs
@@ -7,7 +7,8 @@ use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::{source_map::Spanned, sym};
+use rustc_span::source_map::Spanned;
+use rustc_span::sym;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/items_after_test_module.rs b/src/tools/clippy/clippy_lints/src/items_after_test_module.rs
index 40378ee82..55a43e915 100644
--- a/src/tools/clippy/clippy_lints/src/items_after_test_module.rs
+++ b/src/tools/clippy/clippy_lints/src/items_after_test_module.rs
@@ -1,4 +1,5 @@
-use clippy_utils::{diagnostics::span_lint_and_help, is_from_proc_macro, is_in_cfg_test};
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::{is_from_proc_macro, is_in_cfg_test};
use rustc_hir::{HirId, ItemId, ItemKind, Mod};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
@@ -32,7 +33,7 @@ declare_clippy_lint! {
/// // [...]
/// }
/// ```
- #[clippy::version = "1.70.0"]
+ #[clippy::version = "1.71.0"]
pub ITEMS_AFTER_TEST_MODULE,
style,
"An item was found after the testing module `tests`"
diff --git a/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs b/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs
index c924d7361..066d2c4b7 100644
--- a/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs
+++ b/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs
@@ -1,5 +1,8 @@
-use clippy_utils::{diagnostics::span_lint, get_parent_node, ty::implements_trait};
-use rustc_hir::{def_id::LocalDefId, FnSig, ImplItem, ImplItemKind, Item, ItemKind, Node, TraitItem, TraitItemKind};
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::get_parent_node;
+use clippy_utils::ty::implements_trait;
+use rustc_hir::def_id::LocalDefId;
+use rustc_hir::{FnSig, ImplItem, ImplItemKind, Item, ItemKind, Node, TraitItem, TraitItemKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::symbol::sym;
@@ -68,7 +71,7 @@ fn check_sig(cx: &LateContext<'_>, name: &str, sig: &FnSig<'_>, fn_id: LocalDefI
if sig.decl.implicit_self.has_implicit_self() {
let ret_ty = cx
.tcx
- .erase_late_bound_regions(cx.tcx.fn_sig(fn_id).subst_identity().output());
+ .erase_late_bound_regions(cx.tcx.fn_sig(fn_id).instantiate_identity().output());
let ret_ty = cx
.tcx
.try_normalize_erasing_regions(cx.param_env, ret_ty)
diff --git a/src/tools/clippy/clippy_lints/src/large_const_arrays.rs b/src/tools/clippy/clippy_lints/src/large_const_arrays.rs
index 4dc750c03..9b26c3573 100644
--- a/src/tools/clippy/clippy_lints/src/large_const_arrays.rs
+++ b/src/tools/clippy/clippy_lints/src/large_const_arrays.rs
@@ -50,7 +50,11 @@ impl<'tcx> LateLintPass<'tcx> for LargeConstArrays {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
if_chain! {
if !item.span.from_expansion();
- if let ItemKind::Const(hir_ty, _) = &item.kind;
+ if let ItemKind::Const(hir_ty, generics, _) = &item.kind;
+ // Since static items may not have generics, skip generic const items.
+ // FIXME(generic_const_items): Checking `generics.has_where_clause_predicates` alone may not
+ // suffice, as it doesn't account for empty where-clauses that consist only of the keyword `where`.
+ if generics.params.is_empty() && !generics.has_where_clause_predicates;
let ty = hir_ty_to_ty(cx.tcx, hir_ty);
if let ty::Array(element_type, cst) = ty.kind();
if let ConstKind::Value(ty::ValTree::Leaf(element_count)) = cst.kind();
diff --git a/src/tools/clippy/clippy_lints/src/large_enum_variant.rs b/src/tools/clippy/clippy_lints/src/large_enum_variant.rs
index 1c99bd2f3..b22b57a30 100644
--- a/src/tools/clippy/clippy_lints/src/large_enum_variant.rs
+++ b/src/tools/clippy/clippy_lints/src/large_enum_variant.rs
@@ -1,10 +1,8 @@
//! lint when there is a large size difference between variants on an enum
+use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet_with_applicability;
-use clippy_utils::{
- diagnostics::span_lint_and_then,
- ty::{approx_ty_size, is_copy, AdtVariantInfo},
-};
+use clippy_utils::ty::{approx_ty_size, is_copy, AdtVariantInfo};
use rustc_errors::Applicability;
use rustc_hir::{Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
@@ -83,7 +81,7 @@ impl<'tcx> LateLintPass<'tcx> for LargeEnumVariant {
return;
}
if let ItemKind::Enum(ref def, _) = item.kind {
- let ty = cx.tcx.type_of(item.owner_id).subst_identity();
+ let ty = cx.tcx.type_of(item.owner_id).instantiate_identity();
let Adt(adt, subst) = ty.kind() else {
panic!("already checked whether this is an enum")
};
@@ -169,8 +167,8 @@ impl<'tcx> LateLintPass<'tcx> for LargeEnumVariant {
}
fn maybe_copy<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
- if let Adt(_def, substs) = ty.kind()
- && substs.types().next().is_some()
+ if let Adt(_def, args) = ty.kind()
+ && args.types().next().is_some()
&& let Some(copy_trait) = cx.tcx.lang_items().copy_trait()
{
return cx.tcx.non_blanket_impls_for_ty(copy_trait, ty).next().is_some();
diff --git a/src/tools/clippy/clippy_lints/src/large_futures.rs b/src/tools/clippy/clippy_lints/src/large_futures.rs
index 087c4a652..d67d58993 100644
--- a/src/tools/clippy/clippy_lints/src/large_futures.rs
+++ b/src/tools/clippy/clippy_lints/src/large_futures.rs
@@ -1,5 +1,6 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
-use clippy_utils::{diagnostics::span_lint_and_sugg, ty::implements_trait};
+use clippy_utils::ty::implements_trait;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, LangItem, MatchSource, QPath};
use rustc_lint::{LateContext, LateLintPass};
diff --git a/src/tools/clippy/clippy_lints/src/large_include_file.rs b/src/tools/clippy/clippy_lints/src/large_include_file.rs
index 424c0d9e7..566901de3 100644
--- a/src/tools/clippy/clippy_lints/src/large_include_file.rs
+++ b/src/tools/clippy/clippy_lints/src/large_include_file.rs
@@ -2,8 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_note;
use clippy_utils::is_lint_allowed;
use clippy_utils::macros::root_macro_call_first_node;
use rustc_ast::LitKind;
-use rustc_hir::Expr;
-use rustc_hir::ExprKind;
+use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::sym;
diff --git a/src/tools/clippy/clippy_lints/src/large_stack_frames.rs b/src/tools/clippy/clippy_lints/src/large_stack_frames.rs
index 9c0cc978a..7aa1446d5 100644
--- a/src/tools/clippy/clippy_lints/src/large_stack_frames.rs
+++ b/src/tools/clippy/clippy_lints/src/large_stack_frames.rs
@@ -4,11 +4,9 @@ use clippy_utils::diagnostics::span_lint_and_note;
use clippy_utils::fn_has_unsatisfiable_preds;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::FnKind;
-use rustc_hir::Body;
-use rustc_hir::FnDecl;
+use rustc_hir::{Body, FnDecl};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::declare_tool_lint;
-use rustc_session::impl_lint_pass;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::Span;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/len_zero.rs b/src/tools/clippy/clippy_lints/src/len_zero.rs
index 17bd89efa..deba232bd 100644
--- a/src/tools/clippy/clippy_lints/src/len_zero.rs
+++ b/src/tools/clippy/clippy_lints/src/len_zero.rs
@@ -1,22 +1,22 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_then};
use clippy_utils::source::snippet_with_context;
-use clippy_utils::{get_item_name, get_parent_as_impl, is_lint_allowed, peel_ref_operators, sugg::Sugg};
+use clippy_utils::sugg::Sugg;
+use clippy_utils::{get_item_name, get_parent_as_impl, is_lint_allowed, peel_ref_operators};
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
-use rustc_hir::def_id::DefIdSet;
+use rustc_hir::def::Res;
+use rustc_hir::def_id::{DefId, DefIdSet};
use rustc_hir::{
- def::Res, def_id::DefId, lang_items::LangItem, AssocItemKind, BinOpKind, Expr, ExprKind, FnRetTy, GenericArg,
- GenericBound, ImplItem, ImplItemKind, ImplicitSelfKind, Item, ItemKind, Mutability, Node, PathSegment, PrimTy,
- QPath, TraitItemRef, TyKind, TypeBindingKind,
+ AssocItemKind, BinOpKind, Expr, ExprKind, FnRetTy, GenericArg, GenericBound, ImplItem, ImplItemKind,
+ ImplicitSelfKind, Item, ItemKind, LangItem, Mutability, Node, PatKind, PathSegment, PrimTy, QPath, TraitItemRef,
+ TyKind, TypeBindingKind,
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, AssocKind, FnSig, Ty};
use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::{
- source_map::{Span, Spanned, Symbol},
- symbol::sym,
-};
+use rustc_span::source_map::{Span, Spanned, Symbol};
+use rustc_span::symbol::sym;
declare_clippy_lint! {
/// ### What it does
@@ -145,7 +145,10 @@ impl<'tcx> LateLintPass<'tcx> for LenZero {
if let Some(local_id) = ty_id.as_local();
let ty_hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_id);
if !is_lint_allowed(cx, LEN_WITHOUT_IS_EMPTY, ty_hir_id);
- if let Some(output) = parse_len_output(cx, cx.tcx.fn_sig(item.owner_id).subst_identity().skip_binder());
+ if let Some(output) = parse_len_output(
+ cx,
+ cx.tcx.fn_sig(item.owner_id).instantiate_identity().skip_binder()
+ );
then {
let (name, kind) = match cx.tcx.hir().find(ty_hir_id) {
Some(Node::ForeignItem(x)) => (x.ident.name, "extern type"),
@@ -167,6 +170,31 @@ impl<'tcx> LateLintPass<'tcx> for LenZero {
return;
}
+ if let ExprKind::Let(lt) = expr.kind
+ && has_is_empty(cx, lt.init)
+ && match lt.pat.kind {
+ PatKind::Slice([], None, []) => true,
+ PatKind::Lit(lit) if is_empty_string(lit) => true,
+ _ => false,
+ }
+ {
+ let mut applicability = Applicability::MachineApplicable;
+
+ let lit1 = peel_ref_operators(cx, lt.init);
+ let lit_str =
+ Sugg::hir_with_context(cx, lit1, lt.span.ctxt(), "_", &mut applicability).maybe_par();
+
+ span_lint_and_sugg(
+ cx,
+ COMPARISON_TO_EMPTY,
+ lt.span,
+ "comparison to empty slice using `if let`",
+ "using `is_empty` is clearer and more explicit",
+ format!("{lit_str}.is_empty()"),
+ applicability,
+ );
+ }
+
if let ExprKind::Binary(Spanned { node: cmp, .. }, left, right) = expr.kind {
// expr.span might contains parenthesis, see issue #10529
let actual_span = left.span.with_hi(right.span.hi());
@@ -425,7 +453,7 @@ fn check_for_is_empty(
if !(is_empty.fn_has_self_parameter
&& check_is_empty_sig(
cx,
- cx.tcx.fn_sig(is_empty.def_id).subst_identity().skip_binder(),
+ cx.tcx.fn_sig(is_empty.def_id).instantiate_identity().skip_binder(),
self_kind,
output,
)) =>
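
The new `if let` arm above extends `COMPARISON_TO_EMPTY` to emptiness checks written as patterns; a small sketch of code it now flags (illustration only, based on the added check):

fn main() {
    let v: Vec<u8> = Vec::new();
    // Empty slice pattern: flagged, suggesting `v.as_slice().is_empty()`.
    if let [] = v.as_slice() {
        println!("empty vec");
    }

    let s = String::new();
    // Empty string literal pattern: flagged, suggesting `s.as_str().is_empty()`.
    if let "" = s.as_str() {
        println!("empty string");
    }
}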
diff --git a/src/tools/clippy/clippy_lints/src/let_if_seq.rs b/src/tools/clippy/clippy_lints/src/let_if_seq.rs
index db41bc67d..2f6f36c39 100644
--- a/src/tools/clippy/clippy_lints/src/let_if_seq.rs
+++ b/src/tools/clippy/clippy_lints/src/let_if_seq.rs
@@ -1,6 +1,7 @@
use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::path_to_local_id;
use clippy_utils::source::snippet;
-use clippy_utils::{path_to_local_id, visitors::is_local_used};
+use clippy_utils::visitors::is_local_used;
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
diff --git a/src/tools/clippy/clippy_lints/src/let_underscore.rs b/src/tools/clippy/clippy_lints/src/let_underscore.rs
index e66141809..e7c875ab3 100644
--- a/src/tools/clippy/clippy_lints/src/let_underscore.rs
+++ b/src/tools/clippy/clippy_lints/src/let_underscore.rs
@@ -1,12 +1,10 @@
use clippy_utils::diagnostics::span_lint_and_help;
-use clippy_utils::is_from_proc_macro;
use clippy_utils::ty::{implements_trait, is_must_use_ty, match_type};
-use clippy_utils::{is_must_use_func_call, paths};
+use clippy_utils::{is_from_proc_macro, is_must_use_func_call, paths};
use rustc_hir::{Local, PatKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
-use rustc_middle::ty::subst::GenericArgKind;
-use rustc_middle::ty::IsSuggestable;
+use rustc_middle::ty::{GenericArgKind, IsSuggestable};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::{BytePos, Span};
diff --git a/src/tools/clippy/clippy_lints/src/lib.rs b/src/tools/clippy/clippy_lints/src/lib.rs
index 87329ee5e..358004cf4 100644
--- a/src/tools/clippy/clippy_lints/src/lib.rs
+++ b/src/tools/clippy/clippy_lints/src/lib.rs
@@ -65,6 +65,7 @@ mod declared_lints;
mod renamed_lints;
// begin lints modules, do not remove this comment, it’s used in `update_lints`
+mod absolute_paths;
mod allow_attributes;
mod almost_complete_range;
mod approx_const;
@@ -120,6 +121,7 @@ mod entry;
mod enum_clike;
mod enum_variants;
mod equatable_if_let;
+mod error_impl_error;
mod escape;
mod eta_reduction;
mod excessive_bools;
@@ -131,12 +133,12 @@ mod extra_unused_type_parameters;
mod fallible_impl_from;
mod float_literal;
mod floating_point_arithmetic;
-mod fn_null_check;
mod format;
mod format_args;
mod format_impl;
mod format_push_string;
mod formatting;
+mod four_forward_slashes;
mod from_over_into;
mod from_raw_with_void_ptr;
mod from_str_radix_10;
@@ -145,6 +147,7 @@ mod future_not_send;
mod if_let_mutex;
mod if_not_else;
mod if_then_some_else_none;
+mod ignored_unit_patterns;
mod implicit_hasher;
mod implicit_return;
mod implicit_saturating_add;
@@ -184,6 +187,7 @@ mod manual_assert;
mod manual_async_fn;
mod manual_bits;
mod manual_clamp;
+mod manual_float_methods;
mod manual_is_ascii_check;
mod manual_let_else;
mod manual_main_separator_str;
@@ -229,6 +233,7 @@ mod needless_for_each;
mod needless_if;
mod needless_late_init;
mod needless_parens_on_range_literals;
+mod needless_pass_by_ref_mut;
mod needless_pass_by_value;
mod needless_question_mark;
mod needless_update;
@@ -271,6 +276,7 @@ mod redundant_clone;
mod redundant_closure_call;
mod redundant_else;
mod redundant_field_names;
+mod redundant_locals;
mod redundant_pub_crate;
mod redundant_slicing;
mod redundant_static_lifetimes;
@@ -346,11 +352,10 @@ mod zero_div_zero;
mod zero_sized_map_values;
// end lints modules, do not remove this comment, it’s used in `update_lints`
+use crate::utils::conf::metadata::get_configuration_metadata;
+use crate::utils::conf::TryConf;
pub use crate::utils::conf::{lookup_conf_file, Conf};
-use crate::utils::{
- conf::{metadata::get_configuration_metadata, TryConf},
- FindAll,
-};
+use crate::utils::FindAll;
/// Register all pre expansion lints
///
@@ -663,7 +668,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
});
store.register_late_pass(move |_| Box::new(matches::Matches::new(msrv())));
let matches_for_let_else = conf.matches_for_let_else;
- store.register_late_pass(move |_| Box::new(manual_let_else::ManualLetElse::new(msrv(), matches_for_let_else)));
store.register_early_pass(move || Box::new(manual_non_exhaustive::ManualNonExhaustiveStruct::new(msrv())));
store.register_late_pass(move |_| Box::new(manual_non_exhaustive::ManualNonExhaustiveEnum::new(msrv())));
store.register_late_pass(move |_| Box::new(manual_strip::ManualStrip::new(msrv())));
@@ -723,7 +727,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(drop_forget_ref::DropForgetRef));
store.register_late_pass(|_| Box::new(empty_enum::EmptyEnum));
store.register_late_pass(|_| Box::new(invalid_upcast_comparisons::InvalidUpcastComparisons));
- store.register_late_pass(|_| Box::new(regex::Regex));
+ store.register_late_pass(|_| Box::<regex::Regex>::default());
let ignore_interior_mutability = conf.ignore_interior_mutability.clone();
store.register_late_pass(move |_| Box::new(copies::CopyAndPaste::new(ignore_interior_mutability.clone())));
store.register_late_pass(|_| Box::new(copy_iterator::CopyIterator));
@@ -772,7 +776,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::<useless_conversion::UselessConversion>::default());
store.register_late_pass(|_| Box::new(implicit_hasher::ImplicitHasher));
store.register_late_pass(|_| Box::new(fallible_impl_from::FallibleImplFrom));
- store.register_late_pass(|_| Box::<question_mark::QuestionMark>::default());
+ store.register_late_pass(move |_| Box::new(question_mark::QuestionMark::new(msrv(), matches_for_let_else)));
store.register_late_pass(|_| Box::new(question_mark_used::QuestionMarkUsed));
store.register_early_pass(|| Box::new(suspicious_operation_groupings::SuspiciousOperationGroupings));
store.register_late_pass(|_| Box::new(suspicious_trait_impl::SuspiciousImpl));
@@ -910,7 +914,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(move |_| Box::new(if_then_some_else_none::IfThenSomeElseNone::new(msrv())));
store.register_late_pass(|_| Box::new(bool_assert_comparison::BoolAssertComparison));
store.register_early_pass(move || Box::new(module_style::ModStyle));
- store.register_late_pass(|_| Box::new(unused_async::UnusedAsync));
+ store.register_late_pass(|_| Box::<unused_async::UnusedAsync>::default());
let disallowed_types = conf.disallowed_types.clone();
store.register_late_pass(move |_| Box::new(disallowed_types::DisallowedTypes::new(disallowed_types.clone())));
let import_renames = conf.enforced_import_renames.clone();
@@ -1003,7 +1007,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
semicolon_outside_block_ignore_multiline,
))
});
- store.register_late_pass(|_| Box::new(fn_null_check::FnNullCheck));
store.register_late_pass(|_| Box::new(permissions_set_readonly_false::PermissionsSetReadonlyFalse));
store.register_late_pass(|_| Box::new(size_of_ref::SizeOfRef));
store.register_late_pass(|_| Box::new(multiple_unsafe_ops_per_block::MultipleUnsafeOpsPerBlock));
@@ -1058,6 +1061,11 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
let stack_size_threshold = conf.stack_size_threshold;
store.register_late_pass(move |_| Box::new(large_stack_frames::LargeStackFrames::new(stack_size_threshold)));
store.register_late_pass(|_| Box::new(single_range_in_vec_init::SingleRangeInVecInit));
+ store.register_late_pass(move |_| {
+ Box::new(needless_pass_by_ref_mut::NeedlessPassByRefMut::new(
+ avoid_breaking_exported_api,
+ ))
+ });
store.register_late_pass(|_| Box::new(incorrect_impls::IncorrectImpls));
store.register_late_pass(move |_| {
Box::new(single_call_fn::SingleCallFn {
@@ -1074,6 +1082,19 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|_| Box::new(manual_range_patterns::ManualRangePatterns));
store.register_early_pass(|| Box::new(visibility::Visibility));
store.register_late_pass(move |_| Box::new(tuple_array_conversions::TupleArrayConversions { msrv: msrv() }));
+ store.register_late_pass(|_| Box::new(manual_float_methods::ManualFloatMethods));
+ store.register_late_pass(|_| Box::new(four_forward_slashes::FourForwardSlashes));
+ store.register_late_pass(|_| Box::new(error_impl_error::ErrorImplError));
+ let absolute_paths_max_segments = conf.absolute_paths_max_segments;
+ let absolute_paths_allowed_crates = conf.absolute_paths_allowed_crates.clone();
+ store.register_late_pass(move |_| {
+ Box::new(absolute_paths::AbsolutePaths {
+ absolute_paths_max_segments,
+ absolute_paths_allowed_crates: absolute_paths_allowed_crates.clone(),
+ })
+ });
+ store.register_late_pass(|_| Box::new(redundant_locals::RedundantLocals));
+ store.register_late_pass(|_| Box::new(ignored_unit_patterns::IgnoredUnitPatterns));
// add lints here, do not remove this comment, it's used in `new_lint`
}
diff --git a/src/tools/clippy/clippy_lints/src/lifetimes.rs b/src/tools/clippy/clippy_lints/src/lifetimes.rs
index 852f67365..0004a150d 100644
--- a/src/tools/clippy/clippy_lints/src/lifetimes.rs
+++ b/src/tools/clippy/clippy_lints/src/lifetimes.rs
@@ -15,6 +15,7 @@ use rustc_hir::{
PredicateOrigin, TraitFn, TraitItem, TraitItemKind, Ty, TyKind, WherePredicate,
};
use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::hir::map::Map;
use rustc_middle::hir::nested_filter as middle_nested_filter;
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_lint_pass, declare_tool_lint};
@@ -620,7 +621,7 @@ impl<'cx, 'tcx, F> Visitor<'tcx> for LifetimeChecker<'cx, 'tcx, F>
where
F: NestedFilter<'tcx>,
{
- type Map = rustc_middle::hir::map::Map<'tcx>;
+ type Map = Map<'tcx>;
type NestedFilter = F;
// for lifetimes as parameters of generics
diff --git a/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs b/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs
index 09b2032e2..49425ff0a 100644
--- a/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs
+++ b/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs
@@ -1,7 +1,6 @@
-use clippy_utils::{
- diagnostics::span_lint_and_then, is_diag_item_method, is_trait_method, match_def_path, path_to_local_id, paths,
- ty::match_type,
-};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::ty::match_type;
+use clippy_utils::{is_diag_item_method, is_trait_method, match_def_path, path_to_local_id, paths};
use rustc_errors::Applicability;
use rustc_hir::{Body, Closure, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
diff --git a/src/tools/clippy/clippy_lints/src/literal_representation.rs b/src/tools/clippy/clippy_lints/src/literal_representation.rs
index dadcd9c51..09ca03173 100644
--- a/src/tools/clippy/clippy_lints/src/literal_representation.rs
+++ b/src/tools/clippy/clippy_lints/src/literal_representation.rs
@@ -264,7 +264,7 @@ impl LiteralDigitGrouping {
return;
}
- if Self::is_literal_uuid_formatted(&mut num_lit) {
+ if Self::is_literal_uuid_formatted(&num_lit) {
return;
}
@@ -376,7 +376,7 @@ impl LiteralDigitGrouping {
///
/// Returns `true` if the radix is hexadecimal, and the groups match the
/// UUID format of 8-4-4-4-12.
- fn is_literal_uuid_formatted(num_lit: &mut NumericLiteral<'_>) -> bool {
+ fn is_literal_uuid_formatted(num_lit: &NumericLiteral<'_>) -> bool {
if num_lit.radix != Radix::Hexadecimal {
return false;
}
diff --git a/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs b/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs
index 5c5a4cfce..7b8c88235 100644
--- a/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs
@@ -109,7 +109,7 @@ fn is_ref_iterable<'tcx>(
&& let sig = cx.tcx.liberate_late_bound_regions(fn_id, cx.tcx.fn_sig(fn_id).skip_binder())
&& let &[req_self_ty, req_res_ty] = &**sig.inputs_and_output
&& let param_env = cx.tcx.param_env(fn_id)
- && implements_trait_with_env(cx.tcx, param_env, req_self_ty, trait_id, [])
+ && implements_trait_with_env(cx.tcx, param_env, req_self_ty, trait_id, &[])
&& let Some(into_iter_ty) =
make_normalized_projection_with_regions(cx.tcx, param_env, trait_id, sym!(IntoIter), [req_self_ty])
&& let req_res_ty = normalize_with_regions(cx.tcx, param_env, req_res_ty)
@@ -125,7 +125,7 @@ fn is_ref_iterable<'tcx>(
}
let res_ty = cx.tcx.erase_regions(EarlyBinder::bind(req_res_ty)
- .subst(cx.tcx, typeck.node_substs(call_expr.hir_id)));
+ .instantiate(cx.tcx, typeck.node_args(call_expr.hir_id)));
let mutbl = if let ty::Ref(_, _, mutbl) = *req_self_ty.kind() {
Some(mutbl)
} else {
diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_find.rs b/src/tools/clippy/clippy_lints/src/loops/manual_find.rs
index 4bb9936e9..0aaa66e6b 100644
--- a/src/tools/clippy/clippy_lints/src/loops/manual_find.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/manual_find.rs
@@ -1,14 +1,14 @@
use super::utils::make_iterator_snippet;
use super::MANUAL_FIND;
-use clippy_utils::{
- diagnostics::span_lint_and_then, higher, is_res_lang_ctor, path_res, peel_blocks_with_stmt,
- source::snippet_with_applicability, ty::implements_trait,
-};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::implements_trait;
+use clippy_utils::{higher, is_res_lang_ctor, path_res, peel_blocks_with_stmt};
use if_chain::if_chain;
use rustc_errors::Applicability;
-use rustc_hir::{
- def::Res, lang_items::LangItem, BindingAnnotation, Block, Expr, ExprKind, HirId, Node, Pat, PatKind, Stmt, StmtKind,
-};
+use rustc_hir::def::Res;
+use rustc_hir::lang_items::LangItem;
+use rustc_hir::{BindingAnnotation, Block, Expr, ExprKind, HirId, Node, Pat, PatKind, Stmt, StmtKind};
use rustc_lint::LateContext;
use rustc_span::source_map::Span;
diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs b/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs
index 1e02a30e3..559a2c03f 100644
--- a/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs
@@ -1,9 +1,8 @@
use super::utils::make_iterator_snippet;
use super::MANUAL_FLATTEN;
use clippy_utils::diagnostics::span_lint_and_then;
-use clippy_utils::higher;
use clippy_utils::visitors::is_local_used;
-use clippy_utils::{path_to_local_id, peel_blocks_with_stmt};
+use clippy_utils::{higher, path_to_local_id, peel_blocks_with_stmt};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs b/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs
index 7d1f8ef29..d3fd0e863 100644
--- a/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs
@@ -60,8 +60,8 @@ pub(super) fn check<'tcx>(
o.and_then(|(lhs, rhs)| {
let rhs = fetch_cloned_expr(rhs);
if_chain! {
- if let ExprKind::Index(base_left, idx_left) = lhs.kind;
- if let ExprKind::Index(base_right, idx_right) = rhs.kind;
+ if let ExprKind::Index(base_left, idx_left, _) = lhs.kind;
+ if let ExprKind::Index(base_right, idx_right, _) = rhs.kind;
if let Some(ty) = get_slice_like_element_ty(cx, cx.typeck_results().expr_ty(base_left));
if get_slice_like_element_ty(cx, cx.typeck_results().expr_ty(base_right)).is_some();
if let Some((start_left, offset_left)) = get_details_from_idx(cx, idx_left, &starts);
diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_while_let_some.rs b/src/tools/clippy/clippy_lints/src/loops/manual_while_let_some.rs
index cb9c84be4..ca584a454 100644
--- a/src/tools/clippy/clippy_lints/src/loops/manual_while_let_some.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/manual_while_let_some.rs
@@ -1,9 +1,6 @@
-use clippy_utils::{
- diagnostics::{multispan_sugg_with_applicability, span_lint_and_then},
- match_def_path, paths,
- source::snippet,
- SpanlessEq,
-};
+use clippy_utils::diagnostics::{multispan_sugg_with_applicability, span_lint_and_then};
+use clippy_utils::source::snippet;
+use clippy_utils::{match_def_path, paths, SpanlessEq};
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, Pat, Stmt, StmtKind, UnOp};
use rustc_lint::LateContext;
diff --git a/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs b/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs
index 8412875b1..7b7d19c75 100644
--- a/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs
@@ -35,7 +35,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, cond: &'tcx Expr<'_>, body: &'
if let ExprKind::Block(Block { stmts: [], expr: None, ..}, _) = body.kind;
if let ExprKind::MethodCall(method, callee, ..) = unpack_cond(cond).kind;
if [sym::load, sym::compare_exchange, sym::compare_exchange_weak].contains(&method.ident.name);
- if let ty::Adt(def, _substs) = cx.typeck_results().expr_ty(callee).kind();
+ if let ty::Adt(def, _args) = cx.typeck_results().expr_ty(callee).kind();
if cx.tcx.is_diagnostic_item(sym::AtomicBool, def.did());
then {
span_lint_and_sugg(
@@ -43,7 +43,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, cond: &'tcx Expr<'_>, body: &'
MISSING_SPIN_LOOP,
body.span,
"busy-waiting loop should at least have a spin loop hint",
- "try this",
+ "try",
(if is_no_std_crate(cx) {
"{ core::hint::spin_loop() }"
} else {
diff --git a/src/tools/clippy/clippy_lints/src/loops/mod.rs b/src/tools/clippy/clippy_lints/src/loops/mod.rs
index 529189b52..ffd29ab76 100644
--- a/src/tools/clippy/clippy_lints/src/loops/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/mod.rs
@@ -601,7 +601,7 @@ declare_clippy_lint! {
/// // use `number`
/// }
/// ```
- #[clippy::version = "1.70.0"]
+ #[clippy::version = "1.71.0"]
pub MANUAL_WHILE_LET_SOME,
style,
"checking for emptiness of a `Vec` in the loop condition and popping an element in the body"
diff --git a/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs b/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs
index 4dae93f60..b83d148b5 100644
--- a/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs
@@ -7,7 +7,8 @@ use rustc_hir::{BindingAnnotation, Expr, ExprKind, HirId, Node, PatKind};
use rustc_hir_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::LateContext;
-use rustc_middle::{mir::FakeReadCause, ty};
+use rustc_middle::mir::FakeReadCause;
+use rustc_middle::ty;
use rustc_span::source_map::Span;
pub(super) fn check(cx: &LateContext<'_>, arg: &Expr<'_>, body: &Expr<'_>) {
diff --git a/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs b/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs
index cb4465675..c4af46b8f 100644
--- a/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs
@@ -319,7 +319,7 @@ impl<'a, 'tcx> Visitor<'tcx> for VarVisitor<'a, 'tcx> {
if_chain! {
// an index op
- if let ExprKind::Index(seqexpr, idx) = expr.kind;
+ if let ExprKind::Index(seqexpr, idx, _) = expr.kind;
if !self.check(idx, seqexpr, expr);
then {
return;
@@ -370,7 +370,7 @@ impl<'a, 'tcx> Visitor<'tcx> for VarVisitor<'a, 'tcx> {
ExprKind::MethodCall(_, receiver, args, _) => {
let def_id = self.cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap();
for (ty, expr) in iter::zip(
- self.cx.tcx.fn_sig(def_id).subst_identity().inputs().skip_binder(),
+ self.cx.tcx.fn_sig(def_id).instantiate_identity().inputs().skip_binder(),
std::iter::once(receiver).chain(args.iter()),
) {
self.prefer_mutable = false;
diff --git a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs
index ee338c6be..cc19ac55e 100644
--- a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs
@@ -1,9 +1,9 @@
use super::utils::make_iterator_snippet;
use super::NEVER_LOOP;
-use clippy_utils::consts::constant;
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::higher::ForLoop;
use clippy_utils::source::snippet;
-use clippy_utils::{consts::Constant, diagnostics::span_lint_and_then};
use rustc_errors::Applicability;
use rustc_hir::{Block, Destination, Expr, ExprKind, HirId, InlineAsmOperand, Pat, Stmt, StmtKind};
use rustc_lint::LateContext;
@@ -162,7 +162,9 @@ fn never_loop_expr<'tcx>(
ExprKind::Binary(_, e1, e2)
| ExprKind::Assign(e1, e2, _)
| ExprKind::AssignOp(_, e1, e2)
- | ExprKind::Index(e1, e2) => never_loop_expr_all(cx, &mut [e1, e2].iter().copied(), ignore_ids, main_loop_id),
+ | ExprKind::Index(e1, e2, _) => {
+ never_loop_expr_all(cx, &mut [e1, e2].iter().copied(), ignore_ids, main_loop_id)
+ },
ExprKind::Loop(b, _, _, _) => {
// Break can come from the inner loop so remove them.
absorb_break(never_loop_block(cx, b, ignore_ids, main_loop_id))
diff --git a/src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs b/src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs
index 744fd61bd..dfb800ccf 100644
--- a/src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/single_element_loop.rs
@@ -9,6 +9,7 @@ use rustc_errors::Applicability;
use rustc_hir::{is_range_literal, BorrowKind, Expr, ExprKind, Pat};
use rustc_lint::LateContext;
use rustc_span::edition::Edition;
+use rustc_span::sym;
pub(super) fn check<'tcx>(
cx: &LateContext<'tcx>,
@@ -51,7 +52,7 @@ pub(super) fn check<'tcx>(
},
[],
_,
- ) if method.ident.name.as_str() == "iter_mut" => (arg, "&mut "),
+ ) if method.ident.name == sym::iter_mut => (arg, "&mut "),
ExprKind::MethodCall(
method,
Expr {
diff --git a/src/tools/clippy/clippy_lints/src/loops/utils.rs b/src/tools/clippy/clippy_lints/src/loops/utils.rs
index 28ee24309..6edca2d55 100644
--- a/src/tools/clippy/clippy_lints/src/loops/utils.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/utils.rs
@@ -76,7 +76,7 @@ impl<'a, 'tcx> Visitor<'tcx> for IncrementVisitor<'a, 'tcx> {
ExprKind::Assign(lhs, _, _) if lhs.hir_id == expr.hir_id => {
*state = IncrementVisitorVarState::DontWarn;
},
- ExprKind::AddrOf(BorrowKind::Ref, mutability, _) if mutability == Mutability::Mut => {
+ ExprKind::AddrOf(BorrowKind::Ref, Mutability::Mut, _) => {
*state = IncrementVisitorVarState::DontWarn;
},
_ => (),
@@ -226,7 +226,7 @@ impl<'a, 'tcx> Visitor<'tcx> for InitializeVisitor<'a, 'tcx> {
InitializeVisitorState::DontWarn
}
},
- ExprKind::AddrOf(BorrowKind::Ref, mutability, _) if mutability == Mutability::Mut => {
+ ExprKind::AddrOf(BorrowKind::Ref, Mutability::Mut, _) => {
self.state = InitializeVisitorState::DontWarn;
},
_ => (),
diff --git a/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs b/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs
index d1a1f773f..7f24f3c5d 100644
--- a/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs
@@ -6,8 +6,7 @@ use if_chain::if_chain;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::DefIdMap;
use rustc_hir::intravisit::{walk_expr, Visitor};
-use rustc_hir::HirIdSet;
-use rustc_hir::{Expr, ExprKind, QPath};
+use rustc_hir::{Expr, ExprKind, HirIdSet, QPath};
use rustc_lint::LateContext;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, cond: &'tcx Expr<'_>, expr: &'tcx Expr<'_>) {
diff --git a/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs b/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs
index 55989f8a4..5153070cf 100644
--- a/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs
+++ b/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs
@@ -1,18 +1,18 @@
use super::WHILE_LET_ON_ITERATOR;
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::higher;
use clippy_utils::source::snippet_with_applicability;
-use clippy_utils::{
- get_enclosing_loop_or_multi_call_closure, is_refutable, is_res_lang_ctor, is_trait_method, visitors::is_res_used,
-};
+use clippy_utils::visitors::is_res_used;
+use clippy_utils::{get_enclosing_loop_or_multi_call_closure, higher, is_refutable, is_res_lang_ctor, is_trait_method};
use if_chain::if_chain;
use rustc_errors::Applicability;
+use rustc_hir::def::Res;
use rustc_hir::intravisit::{walk_expr, Visitor};
-use rustc_hir::{def::Res, Closure, Expr, ExprKind, HirId, LangItem, Local, Mutability, PatKind, UnOp};
+use rustc_hir::{Closure, Expr, ExprKind, HirId, LangItem, Local, Mutability, PatKind, UnOp};
use rustc_lint::LateContext;
use rustc_middle::hir::nested_filter::OnlyBodies;
use rustc_middle::ty::adjustment::Adjust;
-use rustc_span::{symbol::sym, Symbol};
+use rustc_span::symbol::sym;
+use rustc_span::Symbol;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
let (scrutinee_expr, iter_expr_struct, iter_expr, some_pat, loop_expr) = if_chain! {
@@ -113,7 +113,7 @@ fn try_parse_iter_expr(cx: &LateContext<'_>, mut e: &Expr<'_>) -> Option<IterExp
// Shouldn't have side effects, but there's no way to trace which field is used. So forget which fields have
// already been seen.
- ExprKind::Index(base, idx) if !idx.can_have_side_effects() => {
+ ExprKind::Index(base, idx, _) if !idx.can_have_side_effects() => {
can_move = false;
fields.clear();
e = base;
@@ -332,7 +332,7 @@ fn needs_mutable_borrow(cx: &LateContext<'_>, iter_expr: &IterExpr, loop_expr: &
if let Some(e) = get_enclosing_loop_or_multi_call_closure(cx, loop_expr) {
let Res::Local(local_id) = iter_expr.path else {
- return true
+ return true;
};
let mut v = NestedLoopVisitor {
cx,
diff --git a/src/tools/clippy/clippy_lints/src/macro_use.rs b/src/tools/clippy/clippy_lints/src/macro_use.rs
index 8e322a979..9b158f18f 100644
--- a/src/tools/clippy/clippy_lints/src/macro_use.rs
+++ b/src/tools/clippy/clippy_lints/src/macro_use.rs
@@ -8,7 +8,8 @@ use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::{edition::Edition, sym, Span};
+use rustc_span::edition::Edition;
+use rustc_span::{sym, Span};
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/manual_bits.rs b/src/tools/clippy/clippy_lints/src/manual_bits.rs
index 4629b22d1..6c7c57ba1 100644
--- a/src/tools/clippy/clippy_lints/src/manual_bits.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_bits.rs
@@ -110,7 +110,7 @@ fn get_size_of_ty<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<
if let Some(def_id) = cx.qpath_res(count_func_qpath, count_func.hir_id).opt_def_id();
if cx.tcx.is_diagnostic_item(sym::mem_size_of, def_id);
then {
- cx.typeck_results().node_substs(count_func.hir_id).types().next().map(|resolved_ty| (*real_ty, resolved_ty))
+ cx.typeck_results().node_args(count_func.hir_id).types().next().map(|resolved_ty| (*real_ty, resolved_ty))
} else {
None
}
diff --git a/src/tools/clippy/clippy_lints/src/manual_clamp.rs b/src/tools/clippy/clippy_lints/src/manual_clamp.rs
index 440362b96..e75666e61 100644
--- a/src/tools/clippy/clippy_lints/src/manual_clamp.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_clamp.rs
@@ -4,21 +4,19 @@ use clippy_utils::msrvs::{self, Msrv};
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::implements_trait;
use clippy_utils::visitors::is_const_evaluatable;
-use clippy_utils::MaybePath;
use clippy_utils::{
eq_expr_value, in_constant, is_diag_trait_item, is_trait_method, path_res, path_to_local_id, peel_blocks,
- peel_blocks_with_stmt,
+ peel_blocks_with_stmt, MaybePath,
};
use itertools::Itertools;
-use rustc_errors::Applicability;
-use rustc_errors::Diagnostic;
-use rustc_hir::{
- def::Res, Arm, BinOpKind, Block, Expr, ExprKind, Guard, HirId, PatKind, PathSegment, PrimTy, QPath, StmtKind,
-};
+use rustc_errors::{Applicability, Diagnostic};
+use rustc_hir::def::Res;
+use rustc_hir::{Arm, BinOpKind, Block, Expr, ExprKind, Guard, HirId, PatKind, PathSegment, PrimTy, QPath, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::Ty;
use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::{symbol::sym, Span};
+use rustc_span::symbol::sym;
+use rustc_span::Span;
use std::ops::Deref;
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/manual_float_methods.rs b/src/tools/clippy/clippy_lints/src/manual_float_methods.rs
new file mode 100644
index 000000000..88db7ae6a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/manual_float_methods.rs
@@ -0,0 +1,175 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::{is_from_proc_macro, path_to_local};
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Constness, Expr, ExprKind};
+use rustc_lint::{LateContext, LateLintPass, Lint, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for manual `is_infinite` reimplementations
+ /// (i.e., `x == <float>::INFINITY || x == <float>::NEG_INFINITY`).
+ ///
+ /// ### Why is this bad?
+ /// The method `is_infinite` is shorter and more readable.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 1.0f32;
+ /// if x == f32::INFINITY || x == f32::NEG_INFINITY {}
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # let x = 1.0f32;
+ /// if x.is_infinite() {}
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub MANUAL_IS_INFINITE,
+ style,
+ "use dedicated method to check if a float is infinite"
+}
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for manual `is_finite` reimplementations
+ /// (i.e., `x != <float>::INFINITY && x != <float>::NEG_INFINITY`).
+ ///
+ /// ### Why is this bad?
+ /// The method `is_finite` is shorter and more readable.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let x = 1.0f32;
+ /// if x != f32::INFINITY && x != f32::NEG_INFINITY {}
+ /// if x.abs() < f32::INFINITY {}
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # let x = 1.0f32;
+ /// if x.is_finite() {}
+ /// if x.is_finite() {}
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub MANUAL_IS_FINITE,
+ style,
+ "use dedicated method to check if a float is finite"
+}
+declare_lint_pass!(ManualFloatMethods => [MANUAL_IS_INFINITE, MANUAL_IS_FINITE]);
+
+#[derive(Clone, Copy)]
+enum Variant {
+ ManualIsInfinite,
+ ManualIsFinite,
+}
+
+impl Variant {
+ pub fn lint(self) -> &'static Lint {
+ match self {
+ Self::ManualIsInfinite => MANUAL_IS_INFINITE,
+ Self::ManualIsFinite => MANUAL_IS_FINITE,
+ }
+ }
+
+ pub fn msg(self) -> &'static str {
+ match self {
+ Self::ManualIsInfinite => "manually checking if a float is infinite",
+ Self::ManualIsFinite => "manually checking if a float is finite",
+ }
+ }
+}
+
+impl<'tcx> LateLintPass<'tcx> for ManualFloatMethods {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
+ if !in_external_macro(cx.sess(), expr.span)
+ && (
+ matches!(cx.tcx.constness(cx.tcx.hir().enclosing_body_owner(expr.hir_id)), Constness::NotConst)
+ || cx.tcx.features().active(sym!(const_float_classify))
+ ) && let ExprKind::Binary(kind, lhs, rhs) = expr.kind
+ && let ExprKind::Binary(lhs_kind, lhs_lhs, lhs_rhs) = lhs.kind
+ && let ExprKind::Binary(rhs_kind, rhs_lhs, rhs_rhs) = rhs.kind
+ // Checking all possible scenarios using a function would be a hopeless task, as we have
+ // 16 possible alignments of constants/operands. For now, let's use `partition`.
+ && let (operands, constants) = [lhs_lhs, lhs_rhs, rhs_lhs, rhs_rhs]
+ .into_iter()
+ .partition::<Vec<&Expr<'_>>, _>(|i| path_to_local(i).is_some())
+ && let [first, second] = &*operands
+ && let Some([const_1, const_2]) = constants
+ .into_iter()
+ .map(|i| constant(cx, cx.typeck_results(), i))
+ .collect::<Option<Vec<_>>>()
+ .as_deref()
+ && path_to_local(first).is_some_and(|f| path_to_local(second).is_some_and(|s| f == s))
+ // The actual infinity check; we also allow `NEG_INFINITY` before `INFINITY`, just in
+ // case somebody writes it in that order for some reason
+ && (is_infinity(const_1) && is_neg_infinity(const_2)
+ || is_neg_infinity(const_1) && is_infinity(const_2))
+ && !is_from_proc_macro(cx, expr)
+ && let Some(local_snippet) = snippet_opt(cx, first.span)
+ {
+ let variant = match (kind.node, lhs_kind.node, rhs_kind.node) {
+ (BinOpKind::Or, BinOpKind::Eq, BinOpKind::Eq) => Variant::ManualIsInfinite,
+ (BinOpKind::And, BinOpKind::Ne, BinOpKind::Ne) => Variant::ManualIsFinite,
+ _ => return,
+ };
+
+ span_lint_and_then(
+ cx,
+ variant.lint(),
+ expr.span,
+ variant.msg(),
+ |diag| {
+ match variant {
+ Variant::ManualIsInfinite => {
+ diag.span_suggestion(
+ expr.span,
+ "use the dedicated method instead",
+ format!("{local_snippet}.is_infinite()"),
+ Applicability::MachineApplicable,
+ );
+ },
+ Variant::ManualIsFinite => {
+ // TODO: There's probably some better way to do this, i.e., create
+ // multiple suggestions with notes between each of them
+ diag.span_suggestion_verbose(
+ expr.span,
+ "use the dedicated method instead",
+ format!("{local_snippet}.is_finite()"),
+ Applicability::MaybeIncorrect,
+ )
+ .span_suggestion_verbose(
+ expr.span,
+ "this will alter how it handles NaN; if that is a problem, use instead",
+ format!("{local_snippet}.is_finite() || {local_snippet}.is_nan()"),
+ Applicability::MaybeIncorrect,
+ )
+ .span_suggestion_verbose(
+ expr.span,
+ "or, for conciseness",
+ format!("!{local_snippet}.is_infinite()"),
+ Applicability::MaybeIncorrect,
+ );
+ },
+ }
+ },
+ );
+ }
+ }
+}
+
+fn is_infinity(constant: &Constant<'_>) -> bool {
+ match constant {
+ Constant::F32(float) => *float == f32::INFINITY,
+ Constant::F64(float) => *float == f64::INFINITY,
+ _ => false,
+ }
+}
+
+fn is_neg_infinity(constant: &Constant<'_>) -> bool {
+ match constant {
+ Constant::F32(float) => *float == f32::NEG_INFINITY,
+ Constant::F64(float) => *float == f64::NEG_INFINITY,
+ _ => false,
+ }
+}
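
The `MANUAL_IS_FINITE` suggestion above is only `MaybeIncorrect` because the rewrite changes behaviour for NaN; a small standalone check (illustration only, not part of the diff):

fn main() {
    let x = f32::NAN;
    // The original manual check is true for NaN...
    assert!(x != f32::INFINITY && x != f32::NEG_INFINITY);
    // ...but `is_finite()` is false for NaN, which is why the extra suggestions exist.
    assert!(!x.is_finite());
    // `!x.is_infinite()` keeps the original behaviour for NaN.
    assert!(!x.is_infinite());
}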
diff --git a/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs b/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs
index 31264261f..f26442447 100644
--- a/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs
@@ -1,12 +1,16 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::macros::root_macro_call;
use clippy_utils::msrvs::{self, Msrv};
-use clippy_utils::{diagnostics::span_lint_and_sugg, higher, in_constant, macros::root_macro_call, sugg::Sugg};
+use clippy_utils::sugg::Sugg;
+use clippy_utils::{higher, in_constant};
use rustc_ast::ast::RangeLimits;
use rustc_ast::LitKind::{Byte, Char};
use rustc_errors::Applicability;
use rustc_hir::{BorrowKind, Expr, ExprKind, PatKind, RangeEnd};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::{def_id::DefId, sym, Span};
+use rustc_span::def_id::DefId;
+use rustc_span::{sym, Span};
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/manual_let_else.rs b/src/tools/clippy/clippy_lints/src/manual_let_else.rs
index 59e421c16..c531137b7 100644
--- a/src/tools/clippy/clippy_lints/src/manual_let_else.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_let_else.rs
@@ -1,18 +1,17 @@
+use crate::question_mark::{QuestionMark, QUESTION_MARK};
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::higher::IfLetOrMatch;
-use clippy_utils::msrvs::{self, Msrv};
-use clippy_utils::peel_blocks;
use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::visitors::{Descend, Visitable};
-use if_chain::if_chain;
+use clippy_utils::{is_lint_allowed, msrvs, pat_and_expr_can_be_question_mark, peel_blocks};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, Visitor};
use rustc_hir::{Expr, ExprKind, HirId, ItemId, Local, MatchSource, Pat, PatKind, QPath, Stmt, StmtKind, Ty};
-use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_lint::{LateContext, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_session::declare_tool_lint;
use rustc_span::symbol::{sym, Symbol};
use rustc_span::Span;
use serde::Deserialize;
@@ -50,25 +49,8 @@ declare_clippy_lint! {
"manual implementation of a let...else statement"
}
-pub struct ManualLetElse {
- msrv: Msrv,
- matches_behaviour: MatchLintBehaviour,
-}
-
-impl ManualLetElse {
- #[must_use]
- pub fn new(msrv: Msrv, matches_behaviour: MatchLintBehaviour) -> Self {
- Self {
- msrv,
- matches_behaviour,
- }
- }
-}
-
-impl_lint_pass!(ManualLetElse => [MANUAL_LET_ELSE]);
-
-impl<'tcx> LateLintPass<'tcx> for ManualLetElse {
- fn check_stmt(&mut self, cx: &LateContext<'_>, stmt: &'tcx Stmt<'tcx>) {
+impl<'tcx> QuestionMark {
+ pub(crate) fn check_manual_let_else(&mut self, cx: &LateContext<'_>, stmt: &'tcx Stmt<'tcx>) {
if !self.msrv.meets(msrvs::LET_ELSE) || in_external_macro(cx.sess(), stmt.span) {
return;
}
@@ -81,11 +63,14 @@ impl<'tcx> LateLintPass<'tcx> for ManualLetElse {
let Some(if_let_or_match) = IfLetOrMatch::parse(cx, init)
{
match if_let_or_match {
- IfLetOrMatch::IfLet(if_let_expr, let_pat, if_then, if_else) => if_chain! {
- if let Some(ident_map) = expr_simple_identity_map(local.pat, let_pat, if_then);
- if let Some(if_else) = if_else;
- if expr_diverges(cx, if_else);
- then {
+ IfLetOrMatch::IfLet(if_let_expr, let_pat, if_then, if_else) => {
+ if
+ let Some(ident_map) = expr_simple_identity_map(local.pat, let_pat, if_then) &&
+ let Some(if_else) = if_else &&
+ expr_diverges(cx, if_else) &&
+ let qm_allowed = is_lint_allowed(cx, QUESTION_MARK, stmt.hir_id) &&
+ (qm_allowed || pat_and_expr_can_be_question_mark(cx, let_pat, if_else).is_none())
+ {
emit_manual_let_else(cx, stmt.span, if_let_expr, &ident_map, let_pat, if_else);
}
},
@@ -128,8 +113,6 @@ impl<'tcx> LateLintPass<'tcx> for ManualLetElse {
}
};
}
-
- extract_msrv_attr!(LateContext);
}
fn emit_manual_let_else(
@@ -208,7 +191,9 @@ fn replace_in_pattern(
match pat.kind {
PatKind::Binding(_ann, _id, binding_name, opt_subpt) => {
- let Some(pat_to_put) = ident_map.get(&binding_name.name) else { break 'a };
+ let Some(pat_to_put) = ident_map.get(&binding_name.name) else {
+ break 'a;
+ };
let (sn_ptp, _) = snippet_with_context(cx, pat_to_put.span, span.ctxt(), "", app);
if let Some(subpt) = opt_subpt {
let subpt = replace_in_pattern(cx, span, ident_map, subpt, app, false);
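
A hedged sketch of the interaction the new `qm_allowed` check encodes; the exact conditions live in `pat_and_expr_can_be_question_mark` in clippy_utils, and the function and variable names below are illustrative only:

```rust
fn get(opt: Option<u32>) -> Option<u32> {
    // This shape is also a `question_mark` candidate (`let v = opt?;`), so
    // `manual_let_else` now stays quiet here unless `clippy::question_mark`
    // is allowed at this statement.
    let v = if let Some(v) = opt { v } else { return None };
    Some(v + 1)
}

fn get_or_panic(opt: Option<u32>) -> u32 {
    // No `?` rewrite exists (the else branch panics), so the let-else
    // suggestion still applies: `let Some(v) = opt else { panic!("missing") };`
    let v = if let Some(v) = opt { v } else { panic!("missing") };
    v + 1
}
```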
diff --git a/src/tools/clippy/clippy_lints/src/manual_range_patterns.rs b/src/tools/clippy/clippy_lints/src/manual_range_patterns.rs
index 65ff55520..39d8b20d3 100644
--- a/src/tools/clippy/clippy_lints/src/manual_range_patterns.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_range_patterns.rs
@@ -2,12 +2,8 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use rustc_ast::LitKind;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
-use rustc_hir::Expr;
-use rustc_hir::ExprKind;
-use rustc_hir::PatKind;
-use rustc_hir::RangeEnd;
-use rustc_lint::LintContext;
-use rustc_lint::{LateContext, LateLintPass};
+use rustc_hir::{Expr, ExprKind, PatKind, RangeEnd, UnOp};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_lint_pass, declare_tool_lint};
@@ -19,6 +15,10 @@ declare_clippy_lint! {
/// ### Why is this bad?
/// Using an explicit range is more concise and easier to read.
///
+ /// ### Known issues
+ /// This lint intentionally does not handle numbers greater than `i128::MAX` for `u128` literals
+ /// in order to support negative numbers.
+ ///
/// ### Example
/// ```rust
/// let x = 6;
@@ -36,11 +36,14 @@ declare_clippy_lint! {
}
declare_lint_pass!(ManualRangePatterns => [MANUAL_RANGE_PATTERNS]);
-fn expr_as_u128(expr: &Expr<'_>) -> Option<u128> {
- if let ExprKind::Lit(lit) = expr.kind
+fn expr_as_i128(expr: &Expr<'_>) -> Option<i128> {
+ if let ExprKind::Unary(UnOp::Neg, expr) = expr.kind {
+ expr_as_i128(expr).map(|num| -num)
+ } else if let ExprKind::Lit(lit) = expr.kind
&& let LitKind::Int(num, _) = lit.node
{
- Some(num)
+ // Intentionally not handling numbers greater than i128::MAX (for u128 literals) for now.
+ num.try_into().ok()
} else {
None
}
@@ -56,22 +59,22 @@ impl LateLintPass<'_> for ManualRangePatterns {
if let PatKind::Or(pats) = pat.kind
&& pats.len() >= 3
{
- let mut min = u128::MAX;
- let mut max = 0;
+ let mut min = i128::MAX;
+ let mut max = i128::MIN;
let mut numbers_found = FxHashSet::default();
let mut ranges_found = Vec::new();
for pat in pats {
if let PatKind::Lit(lit) = pat.kind
- && let Some(num) = expr_as_u128(lit)
+ && let Some(num) = expr_as_i128(lit)
{
numbers_found.insert(num);
min = min.min(num);
max = max.max(num);
} else if let PatKind::Range(Some(left), Some(right), end) = pat.kind
- && let Some(left) = expr_as_u128(left)
- && let Some(right) = expr_as_u128(right)
+ && let Some(left) = expr_as_i128(left)
+ && let Some(right) = expr_as_i128(right)
&& right >= left
{
min = min.min(left);
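
A small sketch of what the switch from `expr_as_u128` to `expr_as_i128` enables (values here are illustrative):

```rust
fn classify(x: i64) -> bool {
    // Previously the negative literals could not be represented as u128, so
    // this pattern was skipped; with `expr_as_i128` it can now be suggested
    // as `matches!(x, -5..=1)`. Literals above `i128::MAX` (only reachable
    // with u128) remain unhandled, as the new "Known issues" section notes.
    matches!(x, -5 | -4 | -3 | -2 | -1 | 0 | 1)
}
```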
diff --git a/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs b/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs
index aafee9271..0e89ca132 100644
--- a/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs
@@ -119,7 +119,7 @@ fn check_for_either_unsigned_int_constant<'a>(
}
fn check_for_unsigned_int_constant<'a>(cx: &'a LateContext<'_>, expr: &'a Expr<'_>) -> Option<u128> {
- let Some(int_const) = constant_full_int(cx, cx.typeck_results(), expr) else { return None };
+ let int_const = constant_full_int(cx, cx.typeck_results(), expr)?;
match int_const {
FullInt::S(s) => s.try_into().ok(),
FullInt::U(u) => Some(u),
diff --git a/src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs b/src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs
index 703a6b258..f97600b53 100644
--- a/src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_slice_size_calculation.rs
@@ -92,7 +92,7 @@ fn simplify_half<'tcx>(
&& let ExprKind::Path(ref func_qpath) = func.kind
&& let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id()
&& cx.tcx.is_diagnostic_item(sym::mem_size_of, def_id)
- && let Some(ty2) = cx.typeck_results().node_substs(func.hir_id).types().next()
+ && let Some(ty2) = cx.typeck_results().node_args(func.hir_id).types().next()
// T1 == T2?
&& *ty1 == ty2
{
diff --git a/src/tools/clippy/clippy_lints/src/manual_strip.rs b/src/tools/clippy/clippy_lints/src/manual_strip.rs
index 93d977a5c..201bb56ef 100644
--- a/src/tools/clippy/clippy_lints/src/manual_strip.rs
+++ b/src/tools/clippy/clippy_lints/src/manual_strip.rs
@@ -8,8 +8,7 @@ use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_hir::def::Res;
use rustc_hir::intravisit::{walk_expr, Visitor};
-use rustc_hir::BinOpKind;
-use rustc_hir::{BorrowKind, Expr, ExprKind};
+use rustc_hir::{BinOpKind, BorrowKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
use rustc_session::{declare_tool_lint, impl_lint_pass};
@@ -205,7 +204,7 @@ fn find_stripping<'tcx>(
if_chain! {
if is_ref_str(self.cx, ex);
let unref = peel_ref(ex);
- if let ExprKind::Index(indexed, index) = &unref.kind;
+ if let ExprKind::Index(indexed, index, _) = &unref.kind;
if let Some(higher::Range { start, end, .. }) = higher::Range::hir(index);
if let ExprKind::Path(path) = &indexed.kind;
if self.cx.qpath_res(path, ex.hir_id) == self.target;
diff --git a/src/tools/clippy/clippy_lints/src/map_unit_fn.rs b/src/tools/clippy/clippy_lints/src/map_unit_fn.rs
index edcab6968..f0a0f482a 100644
--- a/src/tools/clippy/clippy_lints/src/map_unit_fn.rs
+++ b/src/tools/clippy/clippy_lints/src/map_unit_fn.rs
@@ -104,7 +104,7 @@ fn is_unit_function(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
let ty = cx.typeck_results().expr_ty(expr);
if let ty::FnDef(id, _) = *ty.kind() {
- if let Some(fn_type) = cx.tcx.fn_sig(id).subst_identity().no_bound_vars() {
+ if let Some(fn_type) = cx.tcx.fn_sig(id).instantiate_identity().no_bound_vars() {
return is_unit_type(fn_type.output());
}
}
@@ -226,7 +226,7 @@ fn lint_map_unit_fn(
);
span_lint_and_then(cx, lint, expr.span, &msg, |diag| {
- diag.span_suggestion(stmt.span, "try this", suggestion, applicability);
+ diag.span_suggestion(stmt.span, "try", suggestion, applicability);
});
} else if let Some((binding, closure_expr)) = unit_closure(cx, fn_arg) {
let msg = suggestion_msg("closure", map_type);
@@ -241,7 +241,7 @@ fn lint_map_unit_fn(
snippet_with_applicability(cx, var_arg.span, "_", &mut applicability),
snippet_with_context(cx, reduced_expr_span, var_arg.span.ctxt(), "_", &mut applicability).0,
);
- diag.span_suggestion(stmt.span, "try this", suggestion, applicability);
+ diag.span_suggestion(stmt.span, "try", suggestion, applicability);
} else {
let suggestion = format!(
"if let {0}({1}) = {2} {{ ... }}",
@@ -249,7 +249,7 @@ fn lint_map_unit_fn(
snippet(cx, binding.pat.span, "_"),
snippet(cx, var_arg.span, "_"),
);
- diag.span_suggestion(stmt.span, "try this", suggestion, Applicability::HasPlaceholders);
+ diag.span_suggestion(stmt.span, "try", suggestion, Applicability::HasPlaceholders);
}
});
}
diff --git a/src/tools/clippy/clippy_lints/src/match_result_ok.rs b/src/tools/clippy/clippy_lints/src/match_result_ok.rs
index 6ec978403..841c020f2 100644
--- a/src/tools/clippy/clippy_lints/src/match_result_ok.rs
+++ b/src/tools/clippy/clippy_lints/src/match_result_ok.rs
@@ -1,8 +1,7 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::higher;
-use clippy_utils::is_res_lang_ctor;
use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{higher, is_res_lang_ctor};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, LangItem, PatKind};
diff --git a/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs b/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs
index d18c92cab..3329f93b7 100644
--- a/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs
@@ -28,7 +28,7 @@ pub(crate) fn check(cx: &LateContext<'_>, local: &Local<'_>) -> bool {
local.span,
"you seem to be trying to use `match` to destructure a single infallible pattern. \
Consider using `let`",
- "try this",
+ "try",
format!(
"let {}({}{}) = {};",
snippet_with_applicability(cx, variant_name.span, "..", &mut applicability),
diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs b/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs
index f6bf0e7aa..e0181a475 100644
--- a/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs
@@ -143,7 +143,7 @@ fn check<'tcx>(
MANUAL_FILTER,
expr.span,
"manual implementation of `Option::filter`",
- "try this",
+ "try",
if sugg_info.needs_brackets {
format!(
"{{ {}{}.filter({body_str}) }}",
diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_map.rs b/src/tools/clippy/clippy_lints/src/matches/manual_map.rs
index aaba23967..ed3d8b09f 100644
--- a/src/tools/clippy/clippy_lints/src/matches/manual_map.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/manual_map.rs
@@ -58,7 +58,7 @@ fn check<'tcx>(
MANUAL_MAP,
expr.span,
"manual implementation of `Option::map`",
- "try this",
+ "try",
if sugg_info.needs_brackets {
format!(
"{{ {}{}.map({}) }}",
diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_utils.rs b/src/tools/clippy/clippy_lints/src/matches/manual_utils.rs
index 5b7644a53..6b611f567 100644
--- a/src/tools/clippy/clippy_lints/src/matches/manual_utils.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/manual_utils.rs
@@ -1,14 +1,17 @@
-use crate::{map_unit_fn::OPTION_MAP_UNIT_FN, matches::MATCH_AS_REF};
+use crate::map_unit_fn::OPTION_MAP_UNIT_FN;
+use crate::matches::MATCH_AS_REF;
use clippy_utils::source::{snippet_with_applicability, snippet_with_context};
+use clippy_utils::sugg::Sugg;
use clippy_utils::ty::{is_copy, is_type_diagnostic_item, peel_mid_ty_refs_is_mutable, type_is_unsafe_function};
use clippy_utils::{
can_move_expr_to_closure, is_else_clause, is_lint_allowed, is_res_lang_ctor, path_res, path_to_local_id,
- peel_blocks, peel_hir_expr_refs, peel_hir_expr_while, sugg::Sugg, CaptureKind,
+ peel_blocks, peel_hir_expr_refs, peel_hir_expr_while, CaptureKind,
};
use rustc_ast::util::parser::PREC_POSTFIX;
use rustc_errors::Applicability;
+use rustc_hir::def::Res;
use rustc_hir::LangItem::{OptionNone, OptionSome};
-use rustc_hir::{def::Res, BindingAnnotation, Expr, ExprKind, HirId, Mutability, Pat, PatKind, Path, QPath};
+use rustc_hir::{BindingAnnotation, Expr, ExprKind, HirId, Mutability, Pat, PatKind, Path, QPath};
use rustc_lint::LateContext;
use rustc_span::{sym, SyntaxContext};
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs b/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs
index 2818f030b..d51cca040 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs
@@ -27,10 +27,10 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr:
let input_ty = cx.typeck_results().expr_ty(ex);
let cast = if_chain! {
- if let ty::Adt(_, substs) = input_ty.kind();
- let input_ty = substs.type_at(0);
- if let ty::Adt(_, substs) = output_ty.kind();
- let output_ty = substs.type_at(0);
+ if let ty::Adt(_, args) = input_ty.kind();
+ let input_ty = args.type_at(0);
+ if let ty::Adt(_, args) = output_ty.kind();
+ let output_ty = args.type_at(0);
if let ty::Ref(_, output_ty, _) = *output_ty.kind();
if input_ty != output_ty;
then {
@@ -46,7 +46,7 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr:
MATCH_AS_REF,
expr.span,
&format!("use `{suggestion}()` instead"),
- "try this",
+ "try",
format!(
"{}.{suggestion}(){cast}",
snippet_with_applicability(cx, ex.span, "_", &mut applicability),
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs b/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs
index 0064619ef..e2ddf11ab 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs
@@ -1,9 +1,7 @@
use super::REDUNDANT_PATTERN_MATCHING;
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::is_lint_allowed;
-use clippy_utils::is_wild;
use clippy_utils::source::snippet_with_applicability;
-use clippy_utils::span_contains_comment;
+use clippy_utils::{is_lint_allowed, is_wild, span_contains_comment};
use rustc_ast::{Attribute, LitKind};
use rustc_errors::Applicability;
use rustc_hir::{Arm, BorrowKind, Expr, ExprKind, Guard, Pat, PatKind, QPath};
@@ -139,7 +137,7 @@ where
MATCH_LIKE_MATCHES_MACRO,
expr.span,
&format!("{} expression looks like `matches!` macro", if is_if_let { "if let .. else" } else { "match" }),
- "try this",
+ "try",
format!(
"{}matches!({}, {pat_and_guard})",
if b0 { "" } else { "!" },
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs b/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs
index 2917f85c4..bd53ebd48 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs
@@ -12,7 +12,7 @@ use super::MATCH_ON_VEC_ITEMS;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, scrutinee: &'tcx Expr<'_>) {
if_chain! {
if let Some(idx_expr) = is_vec_indexing(cx, scrutinee);
- if let ExprKind::Index(vec, idx) = idx_expr.kind;
+ if let ExprKind::Index(vec, idx, _) = idx_expr.kind;
then {
// FIXME: could be improved to suggest surrounding every pattern with Some(_),
@@ -22,7 +22,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, scrutinee: &'tcx Expr<'_>) {
MATCH_ON_VEC_ITEMS,
scrutinee.span,
"indexing into a vector may panic",
- "try this",
+ "try",
format!(
"{}.get({})",
snippet(cx, vec.span, ".."),
@@ -36,7 +36,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, scrutinee: &'tcx Expr<'_>) {
fn is_vec_indexing<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> {
if_chain! {
- if let ExprKind::Index(array, index) = expr.kind;
+ if let ExprKind::Index(array, index, _) = expr.kind;
if is_vector(cx, array);
if !is_full_range(cx, index);
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs b/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs
index 3d2fbea63..6fc79fadd 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs
@@ -2,8 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet;
use clippy_utils::{is_lint_allowed, path_to_local, search_same, SpanlessEq, SpanlessHash};
use core::cmp::Ordering;
-use core::iter;
-use core::slice;
+use core::{iter, slice};
use rustc_arena::DroplessArena;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
@@ -240,7 +239,7 @@ impl<'a> NormalizedPat<'a> {
},
PatKind::TupleStruct(ref path, pats, wild_idx) => {
let Some(adt) = cx.typeck_results().pat_ty(pat).ty_adt_def() else {
- return Self::Wild
+ return Self::Wild;
};
let (var_id, variant) = if adt.is_enum() {
match cx.qpath_res(path, pat.hir_id).opt_def_id() {
diff --git a/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs b/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs
index 3126b5901..8d22ceb47 100644
--- a/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs
@@ -143,7 +143,7 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) {
MATCH_WILDCARD_FOR_SINGLE_VARIANTS,
wildcard_span,
"wildcard matches only a single variant and will also match any future added variants",
- "try this",
+ "try",
format_suggestion(x),
Applicability::MaybeIncorrect,
),
@@ -161,7 +161,7 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) {
WILDCARD_ENUM_MATCH_ARM,
wildcard_span,
message,
- "try this",
+ "try",
suggestions.join(" | "),
Applicability::MaybeIncorrect,
);
diff --git a/src/tools/clippy/clippy_lints/src/matches/mod.rs b/src/tools/clippy/clippy_lints/src/matches/mod.rs
index 00fa3eb9b..930386a60 100644
--- a/src/tools/clippy/clippy_lints/src/matches/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/mod.rs
@@ -16,6 +16,7 @@ mod match_wild_enum;
mod match_wild_err_arm;
mod needless_match;
mod overlapping_arms;
+mod redundant_guards;
mod redundant_pattern_match;
mod rest_pat_in_fully_bound_struct;
mod significant_drop_in_scrutinee;
@@ -936,6 +937,36 @@ declare_clippy_lint! {
"reimplementation of `filter`"
}
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for unnecessary guards in match expressions.
+ ///
+ /// ### Why is this bad?
+ /// It's more complex and much less readable. Making it part of the pattern can improve
+ /// exhaustiveness checking as well.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// match x {
+ /// Some(x) if matches!(x, Some(1)) => ..,
+ /// Some(x) if x == Some(2) => ..,
+ /// _ => todo!(),
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust,ignore
+ /// match x {
+ /// Some(Some(1)) => ..,
+ /// Some(Some(2)) => ..,
+ /// _ => todo!(),
+ /// }
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub REDUNDANT_GUARDS,
+ complexity,
+ "checks for unnecessary guards in match expressions"
+}
+
#[derive(Default)]
pub struct Matches {
msrv: Msrv,
@@ -978,6 +1009,7 @@ impl_lint_pass!(Matches => [
TRY_ERR,
MANUAL_MAP,
MANUAL_FILTER,
+ REDUNDANT_GUARDS,
]);
impl<'tcx> LateLintPass<'tcx> for Matches {
@@ -1006,7 +1038,7 @@ impl<'tcx> LateLintPass<'tcx> for Matches {
wild_in_or_pats::check(cx, arms);
}
- if source == MatchSource::TryDesugar {
+ if let MatchSource::TryDesugar(_) = source {
try_err::check(cx, expr, ex);
}
@@ -1025,6 +1057,7 @@ impl<'tcx> LateLintPass<'tcx> for Matches {
needless_match::check_match(cx, ex, arms, expr);
match_on_vec_items::check(cx, ex);
match_str_case_mismatch::check(cx, ex, arms);
+ redundant_guards::check(cx, arms);
if !in_constant(cx, expr.hir_id) {
manual_unwrap_or::check(cx, expr, ex, arms);
@@ -1125,8 +1158,8 @@ fn contains_cfg_arm(cx: &LateContext<'_>, e: &Expr<'_>, scrutinee: &Expr<'_>, ar
//|^
let found = arm_spans.try_fold(start, |start, range| {
let Some((end, next_start)) = range else {
- // Shouldn't happen as macros can't expand to match arms, but treat this as though a `cfg` attribute were
- // found.
+ // Shouldn't happen as macros can't expand to match arms, but treat this as though a `cfg` attribute
+ // were found.
return Err(());
};
let span = SpanData {
diff --git a/src/tools/clippy/clippy_lints/src/matches/redundant_guards.rs b/src/tools/clippy/clippy_lints/src/matches/redundant_guards.rs
new file mode 100644
index 000000000..29af48123
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/matches/redundant_guards.rs
@@ -0,0 +1,196 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::path_to_local;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::visitors::{for_each_expr, is_local_used};
+use rustc_ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{Arm, BinOpKind, Expr, ExprKind, Guard, MatchSource, Node, Pat, PatKind};
+use rustc_lint::LateContext;
+use rustc_span::Span;
+use std::ops::ControlFlow;
+
+use super::REDUNDANT_GUARDS;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'tcx>]) {
+ for outer_arm in arms {
+ let Some(guard) = outer_arm.guard else {
+ continue;
+ };
+
+ // `Some(x) if matches!(x, y)`
+ if let Guard::If(if_expr) = guard
+ && let ExprKind::Match(
+ scrutinee,
+ [
+ arm,
+ Arm {
+ pat: Pat {
+ kind: PatKind::Wild,
+ ..
+ },
+ ..
+ },
+ ],
+ MatchSource::Normal,
+ ) = if_expr.kind
+ {
+ emit_redundant_guards(
+ cx,
+ outer_arm,
+ if_expr.span,
+ scrutinee,
+ arm.pat.span,
+ arm.guard,
+ );
+ }
+ // `Some(x) if let Some(2) = x`
+ else if let Guard::IfLet(let_expr) = guard {
+ emit_redundant_guards(
+ cx,
+ outer_arm,
+ let_expr.span,
+ let_expr.init,
+ let_expr.pat.span,
+ None,
+ );
+ }
+ // `Some(x) if x == Some(2)`
+ else if let Guard::If(if_expr) = guard
+ && let ExprKind::Binary(bin_op, local, pat) = if_expr.kind
+ && matches!(bin_op.node, BinOpKind::Eq)
+ && expr_can_be_pat(cx, pat)
+ // Ensure they have the same type. If they don't, we'd need deref coercion which isn't
+ // possible (currently) in a pattern. In some cases, you can use something like
+ // `as_deref` or similar but in general, we shouldn't lint this as it'd create an
+ // extraordinary amount of FPs.
+ //
+ // This isn't necessary in the other two checks, as they must be a pattern already.
+ && cx.typeck_results().expr_ty(local) == cx.typeck_results().expr_ty(pat)
+ {
+ emit_redundant_guards(
+ cx,
+ outer_arm,
+ if_expr.span,
+ local,
+ pat.span,
+ None,
+ );
+ }
+ }
+}
+
+fn get_pat_binding<'tcx>(cx: &LateContext<'tcx>, guard_expr: &Expr<'_>, outer_arm: &Arm<'tcx>) -> Option<(Span, bool)> {
+ if let Some(local) = path_to_local(guard_expr) && !is_local_used(cx, outer_arm.body, local) {
+ let mut span = None;
+ let mut multiple_bindings = false;
+ // `each_binding` gives the `HirId` of the `Pat` itself, not the binding
+ outer_arm.pat.walk(|pat| {
+ if let PatKind::Binding(_, hir_id, _, _) = pat.kind
+ && hir_id == local
+ && span.replace(pat.span).is_some()
+ {
+ multiple_bindings = true;
+ return false;
+ }
+
+ true
+ });
+
+ // Ignore bindings from or patterns, like `First(x) | Second(x, _) | Third(x, _, _)`
+ if !multiple_bindings {
+ return span.map(|span| {
+ (
+ span,
+ !matches!(cx.tcx.hir().get_parent(local), Node::PatField(_)),
+ )
+ });
+ }
+ }
+
+ None
+}
+
+fn emit_redundant_guards<'tcx>(
+ cx: &LateContext<'tcx>,
+ outer_arm: &Arm<'tcx>,
+ guard_span: Span,
+ local: &Expr<'_>,
+ pat_span: Span,
+ inner_guard: Option<Guard<'_>>,
+) {
+ let mut app = Applicability::MaybeIncorrect;
+ let Some((pat_binding, can_use_shorthand)) = get_pat_binding(cx, local, outer_arm) else {
+ return;
+ };
+
+ span_lint_and_then(
+ cx,
+ REDUNDANT_GUARDS,
+ guard_span.source_callsite(),
+ "redundant guard",
+ |diag| {
+ let binding_replacement = snippet_with_applicability(cx, pat_span, "<binding_repl>", &mut app);
+ diag.multipart_suggestion_verbose(
+ "try",
+ vec![
+ if can_use_shorthand {
+ (pat_binding, binding_replacement.into_owned())
+ } else {
+ (pat_binding.shrink_to_hi(), format!(": {binding_replacement}"))
+ },
+ (
+ guard_span.source_callsite().with_lo(outer_arm.pat.span.hi()),
+ inner_guard.map_or_else(String::new, |guard| {
+ let (prefix, span) = match guard {
+ Guard::If(e) => ("if", e.span),
+ Guard::IfLet(l) => ("if let", l.span),
+ };
+
+ format!(
+ " {prefix} {}",
+ snippet_with_applicability(cx, span, "<guard>", &mut app),
+ )
+ }),
+ ),
+ ],
+ app,
+ );
+ },
+ );
+}
+
+/// Checks if the given `Expr` can also be represented as a `Pat`.
+///
+/// All literals generally also work as patterns, however float literals are special.
+/// They are currently (as of 2023/08/08) still allowed in patterns, but that will become
+/// an error in the future, and rustc already actively warns against this (see rust#41620),
+/// so we don't consider those as usable within patterns for linting purposes.
+fn expr_can_be_pat(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ for_each_expr(expr, |expr| {
+ if match expr.kind {
+ ExprKind::ConstBlock(..) => cx.tcx.features().inline_const_pat,
+ ExprKind::Call(c, ..) if let ExprKind::Path(qpath) = c.kind => {
+ // Allow ctors
+ matches!(cx.qpath_res(&qpath, c.hir_id), Res::Def(DefKind::Ctor(..), ..))
+ },
+ ExprKind::Path(qpath) => {
+ matches!(
+ cx.qpath_res(&qpath, expr.hir_id),
+ Res::Def(DefKind::Struct | DefKind::Enum | DefKind::Ctor(..), ..),
+ )
+ },
+ ExprKind::AddrOf(..)
+ | ExprKind::Array(..)
+ | ExprKind::Tup(..)
+ | ExprKind::Struct(..) => true,
+ ExprKind::Lit(lit) if !matches!(lit.node, LitKind::Float(..)) => true,
+ _ => false,
+ } {
+ return ControlFlow::Continue(());
+ }
+
+ ControlFlow::Break(())
+ })
+ .is_none()
+}
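
A sketch of the guard shapes handled by the new file above, using a hypothetical enum; `Guard::IfLet` (if-let guards, currently unstable) is handled like the equality case and is omitted here so the sketch compiles on stable:

```rust
enum E {
    A(Option<u32>),
    B { inner: u32 },
}

fn f(e: E) -> bool {
    match e {
        // `matches!` guard (desugars to the two-arm match checked above):
        // suggested as `E::A(Some(1))`.
        E::A(x) if matches!(x, Some(1)) => true,
        // Equality against an expression that can itself be a pattern, with
        // the same type on both sides: suggested as `E::A(Some(2))`.
        E::A(x) if x == Some(2) => true,
        // Binding introduced by a struct field: the shorthand cannot be
        // replaced in place, so the fix appends the pattern, giving
        // `E::B { inner: 3 }`.
        E::B { inner } if inner == 3 => true,
        _ => false,
    }
}
```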
diff --git a/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs b/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs
index 479cfd835..9a7c00823 100644
--- a/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs
@@ -3,17 +3,19 @@ use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
use clippy_utils::source::{snippet, walk_span_to_context};
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::{is_type_diagnostic_item, needs_ordered_drop};
-use clippy_utils::visitors::any_temporaries_need_ordered_drop;
+use clippy_utils::visitors::{any_temporaries_need_ordered_drop, for_each_expr};
use clippy_utils::{higher, is_expn_of, is_trait_method};
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::LangItem::{self, OptionNone, OptionSome, PollPending, PollReady, ResultErr, ResultOk};
-use rustc_hir::{Arm, Expr, ExprKind, Node, Pat, PatKind, QPath, UnOp};
+use rustc_hir::{Arm, Expr, ExprKind, Guard, Node, Pat, PatKind, QPath, UnOp};
use rustc_lint::LateContext;
-use rustc_middle::ty::{self, subst::GenericArgKind, Ty};
+use rustc_middle::ty::{self, GenericArgKind, Ty};
use rustc_span::{sym, Symbol};
+use std::fmt::Write;
+use std::ops::ControlFlow;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if let Some(higher::WhileLet { let_pat, let_expr, .. }) = higher::WhileLet::hir(expr) {
@@ -45,49 +47,39 @@ fn try_get_generic_ty(ty: Ty<'_>, index: usize) -> Option<Ty<'_>> {
}
}
-fn find_sugg_for_if_let<'tcx>(
+fn find_method_and_type<'tcx>(
cx: &LateContext<'tcx>,
- expr: &'tcx Expr<'_>,
- let_pat: &Pat<'_>,
- let_expr: &'tcx Expr<'_>,
- keyword: &'static str,
- has_else: bool,
-) {
- // also look inside refs
- // if we have &None for example, peel it so we can detect "if let None = x"
- let check_pat = match let_pat.kind {
- PatKind::Ref(inner, _mutability) => inner,
- _ => let_pat,
- };
- let op_ty = cx.typeck_results().expr_ty(let_expr);
- // Determine which function should be used, and the type contained by the corresponding
- // variant.
- let (good_method, inner_ty) = match check_pat.kind {
+ check_pat: &Pat<'_>,
+ op_ty: Ty<'tcx>,
+) -> Option<(&'static str, Ty<'tcx>)> {
+ match check_pat.kind {
PatKind::TupleStruct(ref qpath, args, rest) => {
let is_wildcard = matches!(args.first().map(|p| &p.kind), Some(PatKind::Wild));
let is_rest = matches!((args, rest.as_opt_usize()), ([], Some(_)));
if is_wildcard || is_rest {
let res = cx.typeck_results().qpath_res(qpath, check_pat.hir_id);
- let Some(id) = res.opt_def_id().map(|ctor_id| cx.tcx.parent(ctor_id)) else { return };
+ let Some(id) = res.opt_def_id().map(|ctor_id| cx.tcx.parent(ctor_id)) else {
+ return None;
+ };
let lang_items = cx.tcx.lang_items();
if Some(id) == lang_items.result_ok_variant() {
- ("is_ok()", try_get_generic_ty(op_ty, 0).unwrap_or(op_ty))
+ Some(("is_ok()", try_get_generic_ty(op_ty, 0).unwrap_or(op_ty)))
} else if Some(id) == lang_items.result_err_variant() {
- ("is_err()", try_get_generic_ty(op_ty, 1).unwrap_or(op_ty))
+ Some(("is_err()", try_get_generic_ty(op_ty, 1).unwrap_or(op_ty)))
} else if Some(id) == lang_items.option_some_variant() {
- ("is_some()", op_ty)
+ Some(("is_some()", op_ty))
} else if Some(id) == lang_items.poll_ready_variant() {
- ("is_ready()", op_ty)
+ Some(("is_ready()", op_ty))
} else if is_pat_variant(cx, check_pat, qpath, Item::Diag(sym::IpAddr, sym!(V4))) {
- ("is_ipv4()", op_ty)
+ Some(("is_ipv4()", op_ty))
} else if is_pat_variant(cx, check_pat, qpath, Item::Diag(sym::IpAddr, sym!(V6))) {
- ("is_ipv6()", op_ty)
+ Some(("is_ipv6()", op_ty))
} else {
- return;
+ None
}
} else {
- return;
+ None
}
},
PatKind::Path(ref path) => {
@@ -99,15 +91,37 @@ fn find_sugg_for_if_let<'tcx>(
} else if cx.tcx.lang_items().poll_pending_variant() == Some(variant_id) {
"is_pending()"
} else {
- return;
+ return None;
};
// `None` and `Pending` don't have an inner type.
- (method, cx.tcx.types.unit)
+ Some((method, cx.tcx.types.unit))
} else {
- return;
+ None
}
},
- _ => return,
+ _ => None,
+ }
+}
+
+fn find_sugg_for_if_let<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'_>,
+ let_pat: &Pat<'_>,
+ let_expr: &'tcx Expr<'_>,
+ keyword: &'static str,
+ has_else: bool,
+) {
+ // also look inside refs
+ // if we have &None for example, peel it so we can detect "if let None = x"
+ let check_pat = match let_pat.kind {
+ PatKind::Ref(inner, _mutability) => inner,
+ _ => let_pat,
+ };
+ let op_ty = cx.typeck_results().expr_ty(let_expr);
+ // Determine which function should be used, and the type contained by the corresponding
+ // variant.
+ let Some((good_method, inner_ty)) = find_method_and_type(cx, check_pat, op_ty) else {
+ return;
};
// If this is the last expression in a block or there is an else clause then the whole
@@ -175,7 +189,7 @@ fn find_sugg_for_if_let<'tcx>(
.maybe_par()
.to_string();
- diag.span_suggestion(span, "try this", format!("{keyword} {sugg}.{good_method}"), app);
+ diag.span_suggestion(span, "try", format!("{keyword} {sugg}.{good_method}"), app);
if needs_drop {
diag.note("this will change drop order of the result, as well as all temporaries");
@@ -189,30 +203,58 @@ pub(super) fn check_match<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, op
if arms.len() == 2 {
let node_pair = (&arms[0].pat.kind, &arms[1].pat.kind);
- if let Some(good_method) = found_good_method(cx, arms, node_pair) {
+ if let Some((good_method, maybe_guard)) = found_good_method(cx, arms, node_pair) {
let span = is_expn_of(expr.span, "matches").unwrap_or(expr.span.to(op.span));
let result_expr = match &op.kind {
ExprKind::AddrOf(_, _, borrowed) => borrowed,
_ => op,
};
+ let mut sugg = format!("{}.{good_method}", snippet(cx, result_expr.span, "_"));
+
+ if let Some(guard) = maybe_guard {
+ let Guard::If(guard) = *guard else { return }; // `...is_none() && let ...` is a syntax error
+
+ // wow, the HIR for match guards in `PAT if let PAT = expr && expr => ...` is annoying!
+ // `guard` here is `Guard::If` with the let expression somewhere deep in the tree of exprs,
+ // counter to the intuition that it should be `Guard::IfLet`, so we need another check
+ // to see that there aren't any let chains anywhere in the guard, as that would break
+ // if we suggest `t.is_none() && (let X = y && z)` for:
+ // `match t { None if let X = y && z => true, _ => false }`
+ let has_nested_let_chain = for_each_expr(guard, |expr| {
+ if matches!(expr.kind, ExprKind::Let(..)) {
+ ControlFlow::Break(())
+ } else {
+ ControlFlow::Continue(())
+ }
+ })
+ .is_some();
+
+ if has_nested_let_chain {
+ return;
+ }
+
+ let guard = Sugg::hir(cx, guard, "..");
+ let _ = write!(sugg, " && {}", guard.maybe_par());
+ }
+
span_lint_and_sugg(
cx,
REDUNDANT_PATTERN_MATCHING,
span,
&format!("redundant pattern matching, consider using `{good_method}`"),
- "try this",
- format!("{}.{good_method}", snippet(cx, result_expr.span, "_")),
+ "try",
+ sugg,
Applicability::MachineApplicable,
);
}
}
}
-fn found_good_method<'a>(
+fn found_good_method<'tcx>(
cx: &LateContext<'_>,
- arms: &[Arm<'_>],
+ arms: &'tcx [Arm<'tcx>],
node: (&PatKind<'_>, &PatKind<'_>),
-) -> Option<&'a str> {
+) -> Option<(&'static str, Option<&'tcx Guard<'tcx>>)> {
match node {
(
PatKind::TupleStruct(ref path_left, patterns_left, _),
@@ -298,7 +340,11 @@ fn get_ident(path: &QPath<'_>) -> Option<rustc_span::symbol::Ident> {
}
}
-fn get_good_method<'a>(cx: &LateContext<'_>, arms: &[Arm<'_>], path_left: &QPath<'_>) -> Option<&'a str> {
+fn get_good_method<'tcx>(
+ cx: &LateContext<'_>,
+ arms: &'tcx [Arm<'tcx>],
+ path_left: &QPath<'_>,
+) -> Option<(&'static str, Option<&'tcx Guard<'tcx>>)> {
if let Some(name) = get_ident(path_left) {
return match name.as_str() {
"Ok" => {
@@ -336,7 +382,9 @@ enum Item {
}
fn is_pat_variant(cx: &LateContext<'_>, pat: &Pat<'_>, path: &QPath<'_>, expected_item: Item) -> bool {
- let Some(id) = cx.typeck_results().qpath_res(path, pat.hir_id).opt_def_id() else { return false };
+ let Some(id) = cx.typeck_results().qpath_res(path, pat.hir_id).opt_def_id() else {
+ return false;
+ };
match expected_item {
Item::Lang(expected_lang_item) => cx
@@ -362,16 +410,16 @@ fn is_pat_variant(cx: &LateContext<'_>, pat: &Pat<'_>, path: &QPath<'_>, expecte
}
#[expect(clippy::too_many_arguments)]
-fn find_good_method_for_match<'a>(
+fn find_good_method_for_match<'a, 'tcx>(
cx: &LateContext<'_>,
- arms: &[Arm<'_>],
+ arms: &'tcx [Arm<'tcx>],
path_left: &QPath<'_>,
path_right: &QPath<'_>,
expected_item_left: Item,
expected_item_right: Item,
should_be_left: &'a str,
should_be_right: &'a str,
-) -> Option<&'a str> {
+) -> Option<(&'a str, Option<&'tcx Guard<'tcx>>)> {
let first_pat = arms[0].pat;
let second_pat = arms[1].pat;
@@ -389,22 +437,22 @@ fn find_good_method_for_match<'a>(
match body_node_pair {
(ExprKind::Lit(lit_left), ExprKind::Lit(lit_right)) => match (&lit_left.node, &lit_right.node) {
- (LitKind::Bool(true), LitKind::Bool(false)) => Some(should_be_left),
- (LitKind::Bool(false), LitKind::Bool(true)) => Some(should_be_right),
+ (LitKind::Bool(true), LitKind::Bool(false)) => Some((should_be_left, arms[0].guard.as_ref())),
+ (LitKind::Bool(false), LitKind::Bool(true)) => Some((should_be_right, arms[1].guard.as_ref())),
_ => None,
},
_ => None,
}
}
-fn find_good_method_for_matches_macro<'a>(
+fn find_good_method_for_matches_macro<'a, 'tcx>(
cx: &LateContext<'_>,
- arms: &[Arm<'_>],
+ arms: &'tcx [Arm<'tcx>],
path_left: &QPath<'_>,
expected_item_left: Item,
should_be_left: &'a str,
should_be_right: &'a str,
-) -> Option<&'a str> {
+) -> Option<(&'a str, Option<&'tcx Guard<'tcx>>)> {
let first_pat = arms[0].pat;
let body_node_pair = if is_pat_variant(cx, first_pat, path_left, expected_item_left) {
@@ -415,8 +463,8 @@ fn find_good_method_for_matches_macro<'a>(
match body_node_pair {
(ExprKind::Lit(lit_left), ExprKind::Lit(lit_right)) => match (&lit_left.node, &lit_right.node) {
- (LitKind::Bool(true), LitKind::Bool(false)) => Some(should_be_left),
- (LitKind::Bool(false), LitKind::Bool(true)) => Some(should_be_right),
+ (LitKind::Bool(true), LitKind::Bool(false)) => Some((should_be_left, arms[0].guard.as_ref())),
+ (LitKind::Bool(false), LitKind::Bool(true)) => Some((should_be_right, arms[1].guard.as_ref())),
_ => None,
},
_ => None,
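
A short sketch of the new guard handling in `check_match` above (names are illustrative):

```rust
fn is_missing(t: Option<u32>, cond: bool) -> bool {
    // Now linted: the guard is carried into the suggestion as
    // `t.is_none() && cond`. If the guard contained a let chain, the lint
    // bails out, since `t.is_none() && (let X = y && z)` is not valid syntax.
    match t {
        None if cond => true,
        _ => false,
    }
}
```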
diff --git a/src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs b/src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs
index d06bcdaa2..4efe93d4b 100644
--- a/src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/rest_pat_in_fully_bound_struct.rs
@@ -10,7 +10,7 @@ pub(crate) fn check(cx: &LateContext<'_>, pat: &Pat<'_>) {
if !pat.span.from_expansion();
if let PatKind::Struct(QPath::Resolved(_, path), fields, true) = pat.kind;
if let Some(def_id) = path.res.opt_def_id();
- let ty = cx.tcx.type_of(def_id).subst_identity();
+ let ty = cx.tcx.type_of(def_id).instantiate_identity();
if let ty::Adt(def, _) = ty.kind();
if def.is_struct() || def.is_union();
if fields.len() == def.non_enum_variant().fields.len();
diff --git a/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs b/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs
index 37528d9f7..ee0fdb353 100644
--- a/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs
@@ -6,8 +6,7 @@ use rustc_errors::{Applicability, Diagnostic};
use rustc_hir::intravisit::{walk_expr, Visitor};
use rustc_hir::{Arm, Expr, ExprKind, MatchSource};
use rustc_lint::{LateContext, LintContext};
-use rustc_middle::ty::subst::GenericArgKind;
-use rustc_middle::ty::{Ty, TypeAndMut};
+use rustc_middle::ty::{GenericArgKind, Ty, TypeAndMut};
use rustc_span::Span;
use super::SIGNIFICANT_DROP_IN_SCRUTINEE;
diff --git a/src/tools/clippy/clippy_lints/src/matches/single_match.rs b/src/tools/clippy/clippy_lints/src/matches/single_match.rs
index 35627d6c6..6b05c6bff 100644
--- a/src/tools/clippy/clippy_lints/src/matches/single_match.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/single_match.rs
@@ -136,7 +136,7 @@ fn report_single_pattern(
}
};
- span_lint_and_sugg(cx, lint, expr.span, msg, "try this", sugg, app);
+ span_lint_and_sugg(cx, lint, expr.span, msg, "try", sugg, app);
}
fn check_opt_like<'a>(
diff --git a/src/tools/clippy/clippy_lints/src/matches/try_err.rs b/src/tools/clippy/clippy_lints/src/matches/try_err.rs
index 3a7f1e034..0fd6f533d 100644
--- a/src/tools/clippy/clippy_lints/src/matches/try_err.rs
+++ b/src/tools/clippy/clippy_lints/src/matches/try_err.rs
@@ -70,7 +70,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, scrutine
TRY_ERR,
expr.span,
"returning an `Err(_)` with the `?` operator",
- "try this",
+ "try",
suggestion,
applicability,
);
@@ -80,7 +80,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, scrutine
/// Finds function return type by examining return expressions in match arms.
fn find_return_type<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx ExprKind<'_>) -> Option<Ty<'tcx>> {
- if let ExprKind::Match(_, arms, MatchSource::TryDesugar) = expr {
+ if let ExprKind::Match(_, arms, MatchSource::TryDesugar(_)) = expr {
for arm in *arms {
if let ExprKind::Ret(Some(ret)) = arm.body.kind {
return Some(cx.typeck_results().expr_ty(ret));
diff --git a/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs b/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs
index 008533488..3a8cc4174 100644
--- a/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs
@@ -1,7 +1,8 @@
use super::{contains_return, BIND_INSTEAD_OF_MAP};
use clippy_utils::diagnostics::{multispan_sugg_with_applicability, span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::peel_blocks;
use clippy_utils::source::{snippet, snippet_with_context};
-use clippy_utils::{peel_blocks, visitors::find_all_ret_expressions};
+use clippy_utils::visitors::find_all_ret_expressions;
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
@@ -87,7 +88,7 @@ pub(crate) trait BindInsteadOfMap {
BIND_INSTEAD_OF_MAP,
expr.span,
&msg,
- "try this",
+ "try",
note,
app,
);
@@ -124,7 +125,7 @@ pub(crate) trait BindInsteadOfMap {
span_lint_and_then(cx, BIND_INSTEAD_OF_MAP, expr.span, &msg, |diag| {
multispan_sugg_with_applicability(
diag,
- "try this",
+ "try",
Applicability::MachineApplicable,
std::iter::once((span, Self::GOOD_METHOD_NAME.into())).chain(
suggs
diff --git a/src/tools/clippy/clippy_lints/src/methods/bytecount.rs b/src/tools/clippy/clippy_lints/src/methods/bytecount.rs
index fef90f6eb..f490a7175 100644
--- a/src/tools/clippy/clippy_lints/src/methods/bytecount.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/bytecount.rs
@@ -45,7 +45,7 @@ pub(super) fn check<'tcx>(
let haystack = if let ExprKind::MethodCall(path, receiver, [], _) =
filter_recv.kind {
let p = path.ident.name;
- if p == sym::iter || p == sym!(iter_mut) {
+ if p == sym::iter || p == sym::iter_mut {
receiver
} else {
filter_recv
diff --git a/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs b/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs
index 46a20ad41..649fc46e4 100644
--- a/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs
@@ -17,7 +17,7 @@ pub(super) fn check<'tcx>(
if_chain! {
if let Some(bytes_id) = cx.typeck_results().type_dependent_def_id(count_recv.hir_id);
if let Some(impl_id) = cx.tcx.impl_of_method(bytes_id);
- if cx.tcx.type_of(impl_id).subst_identity().is_str();
+ if cx.tcx.type_of(impl_id).instantiate_identity().is_str();
let ty = cx.typeck_results().expr_ty(bytes_recv).peel_refs();
if ty.is_str() || is_type_lang_item(cx, ty, hir::LangItem::String);
then {
diff --git a/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs b/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs
index 7711aa78b..d5897822e 100644
--- a/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs
@@ -1,13 +1,13 @@
use clippy_utils::diagnostics::span_lint_and_then;
-use clippy_utils::source::snippet_opt;
-use clippy_utils::source::{indent_of, reindent_multiline};
+use clippy_utils::source::{indent_of, reindent_multiline, snippet_opt};
use clippy_utils::ty::is_type_lang_item;
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, LangItem};
use rustc_lint::LateContext;
-use rustc_span::{source_map::Spanned, Span};
+use rustc_span::source_map::Spanned;
+use rustc_span::Span;
use super::CASE_SENSITIVE_FILE_EXTENSION_COMPARISONS;
@@ -30,7 +30,7 @@ pub(super) fn check<'tcx>(
if_chain! {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- if cx.tcx.type_of(impl_id).subst_identity().is_str();
+ if cx.tcx.type_of(impl_id).instantiate_identity().is_str();
if let ExprKind::Lit(Spanned { node: LitKind::Str(ext_literal, ..), ..}) = arg.kind;
if (2..=6).contains(&ext_literal.as_str().len());
let ext_str = ext_literal.as_str();
diff --git a/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs b/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs
index 079df2226..0e41f3c21 100644
--- a/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs
@@ -4,8 +4,7 @@ use clippy_utils::{method_chain_args, path_def_id};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
-use rustc_lint::LateContext;
-use rustc_lint::Lint;
+use rustc_lint::{LateContext, Lint};
use rustc_middle::ty;
/// Wrapper fn for `CHARS_NEXT_CMP` and `CHARS_LAST_CMP` lints.
diff --git a/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs
index 8984b2cf8..c9d50a5b0 100644
--- a/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs
@@ -5,8 +5,7 @@ use if_chain::if_chain;
use rustc_ast::ast;
use rustc_errors::Applicability;
use rustc_hir as hir;
-use rustc_lint::LateContext;
-use rustc_lint::Lint;
+use rustc_lint::{LateContext, Lint};
/// Wrapper fn for `CHARS_NEXT_CMP` and `CHARS_LAST_CMP` lints with `unwrap()`.
pub(super) fn check(
diff --git a/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs
index 65fd50dff..eb4f003d3 100644
--- a/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs
@@ -5,7 +5,9 @@ use clippy_utils::ty::is_copy;
use rustc_errors::Applicability;
use rustc_hir::{BindingAnnotation, ByRef, Expr, ExprKind, MatchSource, Node, PatKind, QPath};
use rustc_lint::LateContext;
-use rustc_middle::ty::{self, adjustment::Adjust, print::with_forced_trimmed_paths};
+use rustc_middle::ty::adjustment::Adjust;
+use rustc_middle::ty::print::with_forced_trimmed_paths;
+use rustc_middle::ty::{self};
use rustc_span::symbol::{sym, Symbol};
use super::CLONE_ON_COPY;
@@ -62,7 +64,7 @@ pub(super) fn check(
ExprKind::Path(QPath::LangItem(rustc_hir::LangItem::TryTraitBranch, _, _))
),
ExprKind::MethodCall(_, self_arg, ..) if expr.hir_id == self_arg.hir_id => true,
- ExprKind::Match(_, _, MatchSource::TryDesugar | MatchSource::AwaitDesugar)
+ ExprKind::Match(_, _, MatchSource::TryDesugar(_) | MatchSource::AwaitDesugar)
| ExprKind::Field(..)
| ExprKind::Index(..) => true,
_ => false,
diff --git a/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs b/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs
index 5e8ad0861..ddf3c9f27 100644
--- a/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs
@@ -42,7 +42,7 @@ pub(super) fn check(
CLONE_ON_REF_PTR,
expr.span,
"using `.clone()` on a ref-counted pointer",
- "try this",
+ "try",
format!("{caller_type}::<{}>::clone(&{snippet})", subst.type_at(0)),
app,
);
diff --git a/src/tools/clippy/clippy_lints/src/methods/collapsible_str_replace.rs b/src/tools/clippy/clippy_lints/src/methods/collapsible_str_replace.rs
index 5e01ed90f..5409ede60 100644
--- a/src/tools/clippy/clippy_lints/src/methods/collapsible_str_replace.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/collapsible_str_replace.rs
@@ -8,8 +8,7 @@ use rustc_hir as hir;
use rustc_lint::LateContext;
use std::collections::VecDeque;
-use super::method_call;
-use super::COLLAPSIBLE_STR_REPLACE;
+use super::{method_call, COLLAPSIBLE_STR_REPLACE};
pub(super) fn check<'tcx>(
cx: &LateContext<'tcx>,
diff --git a/src/tools/clippy/clippy_lints/src/methods/drain_collect.rs b/src/tools/clippy/clippy_lints/src/methods/drain_collect.rs
index d0c79dc11..6a82d8f75 100644
--- a/src/tools/clippy/clippy_lints/src/methods/drain_collect.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/drain_collect.rs
@@ -4,17 +4,12 @@ use clippy_utils::is_range_full;
use clippy_utils::source::snippet;
use clippy_utils::ty::is_type_lang_item;
use rustc_errors::Applicability;
-use rustc_hir::Expr;
-use rustc_hir::ExprKind;
-use rustc_hir::LangItem;
-use rustc_hir::Path;
-use rustc_hir::QPath;
+use rustc_hir::{Expr, ExprKind, LangItem, Path, QPath};
use rustc_lint::LateContext;
use rustc_middle::query::Key;
use rustc_middle::ty;
use rustc_middle::ty::Ty;
-use rustc_span::sym;
-use rustc_span::Symbol;
+use rustc_span::{sym, Symbol};
/// Checks if both types match the given diagnostic item, e.g.:
///
diff --git a/src/tools/clippy/clippy_lints/src/methods/err_expect.rs b/src/tools/clippy/clippy_lints/src/methods/err_expect.rs
index ae03da0d3..3d82441c0 100644
--- a/src/tools/clippy/clippy_lints/src/methods/err_expect.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/err_expect.rs
@@ -1,8 +1,7 @@
use super::ERR_EXPECT;
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::msrvs::{self, Msrv};
-use clippy_utils::ty::has_debug_impl;
-use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::ty::{has_debug_impl, is_type_diagnostic_item};
use rustc_errors::Applicability;
use rustc_lint::LateContext;
use rustc_middle::ty;
@@ -47,7 +46,7 @@ pub(super) fn check(
/// Given a `Result<T, E>` type, return its data (`T`).
fn get_data_type<'a>(cx: &LateContext<'_>, ty: Ty<'a>) -> Option<Ty<'a>> {
match ty.kind() {
- ty::Adt(_, substs) if is_type_diagnostic_item(cx, ty, sym::Result) => substs.types().next(),
+ ty::Adt(_, args) if is_type_diagnostic_item(cx, ty, sym::Result) => args.types().next(),
_ => None,
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs
index 92d21bb89..d3e90e4bb 100644
--- a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs
@@ -70,7 +70,7 @@ pub(super) fn check<'tcx>(
if let hir::ExprKind::Path(ref p) = fun.kind {
match cx.qpath_res(p, fun.hir_id) {
hir::def::Res::Def(hir::def::DefKind::Fn | hir::def::DefKind::AssocFn, def_id) => matches!(
- cx.tcx.fn_sig(def_id).subst_identity().output().skip_binder().kind(),
+ cx.tcx.fn_sig(def_id).instantiate_identity().output().skip_binder().kind(),
ty::Ref(re, ..) if re.is_static(),
),
_ => false,
@@ -84,7 +84,7 @@ pub(super) fn check<'tcx>(
.type_dependent_def_id(arg.hir_id)
.map_or(false, |method_id| {
matches!(
- cx.tcx.fn_sig(method_id).subst_identity().output().skip_binder().kind(),
+ cx.tcx.fn_sig(method_id).instantiate_identity().output().skip_binder().kind(),
ty::Ref(re, ..) if re.is_static()
)
})
@@ -144,7 +144,7 @@ pub(super) fn check<'tcx>(
EXPECT_FUN_CALL,
span_replace_word,
&format!("use of `{name}` followed by a function call"),
- "try this",
+ "try",
format!("unwrap_or_else({closure_args} panic!({sugg}))"),
applicability,
);
@@ -162,7 +162,7 @@ pub(super) fn check<'tcx>(
EXPECT_FUN_CALL,
span_replace_word,
&format!("use of `{name}` followed by a function call"),
- "try this",
+ "try",
format!("unwrap_or_else({closure_args} {{ panic!(\"{{}}\", {arg_root_snippet}) }})"),
applicability,
);
diff --git a/src/tools/clippy/clippy_lints/src/methods/expect_used.rs b/src/tools/clippy/clippy_lints/src/methods/expect_used.rs
deleted file mode 100644
index 614610335..000000000
--- a/src/tools/clippy/clippy_lints/src/methods/expect_used.rs
+++ /dev/null
@@ -1,44 +0,0 @@
-use clippy_utils::diagnostics::span_lint_and_help;
-use clippy_utils::ty::is_type_diagnostic_item;
-use clippy_utils::{is_in_cfg_test, is_in_test_function};
-use rustc_hir as hir;
-use rustc_lint::LateContext;
-use rustc_span::sym;
-
-use super::EXPECT_USED;
-
-/// lint use of `expect()` or `expect_err` for `Result` and `expect()` for `Option`.
-pub(super) fn check(
- cx: &LateContext<'_>,
- expr: &hir::Expr<'_>,
- recv: &hir::Expr<'_>,
- is_err: bool,
- allow_expect_in_tests: bool,
-) {
- let obj_ty = cx.typeck_results().expr_ty(recv).peel_refs();
-
- let mess = if is_type_diagnostic_item(cx, obj_ty, sym::Option) && !is_err {
- Some((EXPECT_USED, "an `Option`", "None", ""))
- } else if is_type_diagnostic_item(cx, obj_ty, sym::Result) {
- Some((EXPECT_USED, "a `Result`", if is_err { "Ok" } else { "Err" }, "an "))
- } else {
- None
- };
-
- let method = if is_err { "expect_err" } else { "expect" };
-
- if allow_expect_in_tests && (is_in_test_function(cx.tcx, expr.hir_id) || is_in_cfg_test(cx.tcx, expr.hir_id)) {
- return;
- }
-
- if let Some((lint, kind, none_value, none_prefix)) = mess {
- span_lint_and_help(
- cx,
- lint,
- expr.span,
- &format!("used `{method}()` on {kind} value"),
- None,
- &format!("if this value is {none_prefix}`{none_value}`, it will panic"),
- );
- }
-}
diff --git a/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs b/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs
index 37b284635..495b26652 100644
--- a/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs
@@ -31,7 +31,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, arg:
EXTEND_WITH_DRAIN,
expr.span,
"use of `extend` instead of `append` for adding the full range of a second vector",
- "try this",
+ "try",
format!(
"{}.append({}{})",
snippet_with_applicability(cx, recv.span, "..", &mut applicability),
diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map.rs
index fc80f2eea..c9eaa185a 100644
--- a/src/tools/clippy/clippy_lints/src/methods/filter_map.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/filter_map.rs
@@ -1,7 +1,9 @@
-use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then};
+use clippy_utils::macros::{is_panic, root_macro_call};
use clippy_utils::source::{indent_of, reindent_multiline, snippet};
use clippy_utils::ty::is_type_diagnostic_item;
-use clippy_utils::{is_trait_method, path_to_local_id, peel_blocks, SpanlessEq};
+use clippy_utils::{higher, is_trait_method, path_to_local_id, peel_blocks, SpanlessEq};
+use hir::{Body, HirId, MatchSource, Pat};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
@@ -10,12 +12,10 @@ use rustc_hir::{Closure, Expr, ExprKind, PatKind, PathSegment, QPath, UnOp};
use rustc_lint::LateContext;
use rustc_middle::ty::adjustment::Adjust;
use rustc_span::source_map::Span;
-use rustc_span::symbol::{sym, Symbol};
+use rustc_span::symbol::{sym, Ident, Symbol};
use std::borrow::Cow;
-use super::MANUAL_FILTER_MAP;
-use super::MANUAL_FIND_MAP;
-use super::OPTION_FILTER_MAP;
+use super::{MANUAL_FILTER_MAP, MANUAL_FIND_MAP, OPTION_FILTER_MAP};
fn is_method(cx: &LateContext<'_>, expr: &hir::Expr<'_>, method_name: Symbol) -> bool {
match &expr.kind {
@@ -50,6 +50,214 @@ fn is_option_filter_map(cx: &LateContext<'_>, filter_arg: &hir::Expr<'_>, map_ar
is_method(cx, map_arg, sym::unwrap) && is_method(cx, filter_arg, sym!(is_some))
}
+#[derive(Debug, Copy, Clone)]
+enum OffendingFilterExpr<'tcx> {
+ /// `.filter(|opt| opt.is_some())`
+ IsSome {
+ /// The receiver expression
+ receiver: &'tcx Expr<'tcx>,
+ /// If `Some`, then this contains the span of an expression that possibly contains side
+ /// effects: `.filter(|opt| side_effect(opt).is_some())`
+ /// ^^^^^^^^^^^^^^^^
+ ///
+ /// We will use this later for warning the user that the suggested fix may change
+ /// the behavior.
+ side_effect_expr_span: Option<Span>,
+ },
+ /// `.filter(|res| res.is_ok())`
+ IsOk {
+ /// The receiver expression
+ receiver: &'tcx Expr<'tcx>,
+ /// See `IsSome`
+ side_effect_expr_span: Option<Span>,
+ },
+ /// `.filter(|enum| matches!(enum, Enum::A(_)))`
+ Matches {
+ /// The DefId of the variant being matched
+ variant_def_id: hir::def_id::DefId,
+ },
+}
+
+#[derive(Debug)]
+enum CalledMethod {
+ OptionIsSome,
+ ResultIsOk,
+}
+
+/// The result of checking a `map` call, returned by `OffendingFilterExpr::check_map_call`
+#[derive(Debug)]
+enum CheckResult<'tcx> {
+ Method {
+ map_arg: &'tcx Expr<'tcx>,
+ /// The method that was called inside of `filter`
+ method: CalledMethod,
+ /// See `OffendingFilterExpr::IsSome`
+ side_effect_expr_span: Option<Span>,
+ },
+ PatternMatching {
+ /// The span of the variant being matched
+ /// if let Some(s) = enum
+ /// ^^^^^^^
+ variant_span: Span,
+ /// if let Some(s) = enum
+ /// ^
+ variant_ident: Ident,
+ },
+}
+
+impl<'tcx> OffendingFilterExpr<'tcx> {
+ pub fn check_map_call(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ map_body: &'tcx Body<'tcx>,
+ map_param_id: HirId,
+ filter_param_id: HirId,
+ is_filter_param_ref: bool,
+ ) -> Option<CheckResult<'tcx>> {
+ match *self {
+ OffendingFilterExpr::IsSome {
+ receiver,
+ side_effect_expr_span,
+ }
+ | OffendingFilterExpr::IsOk {
+ receiver,
+ side_effect_expr_span,
+ } => {
+ // check if closure ends with expect() or unwrap()
+ if let ExprKind::MethodCall(seg, map_arg, ..) = map_body.value.kind
+ && matches!(seg.ident.name, sym::expect | sym::unwrap | sym::unwrap_or)
+ // .map(|y| f(y).copied().unwrap())
+ // ~~~~
+ && let map_arg_peeled = match map_arg.kind {
+ ExprKind::MethodCall(method, original_arg, [], _) if acceptable_methods(method) => {
+ original_arg
+ },
+ _ => map_arg,
+ }
+ // .map(|y| y[.acceptable_method()].unwrap())
+ && let simple_equal = (path_to_local_id(receiver, filter_param_id)
+ && path_to_local_id(map_arg_peeled, map_param_id))
+ && let eq_fallback = (|a: &Expr<'_>, b: &Expr<'_>| {
+ // in `filter(|x| ..)`, replace `*x` with `x`
+ let a_path = if_chain! {
+ if !is_filter_param_ref;
+ if let ExprKind::Unary(UnOp::Deref, expr_path) = a.kind;
+ then { expr_path } else { a }
+ };
+ // let the filter closure arg and the map closure arg be equal
+ path_to_local_id(a_path, filter_param_id)
+ && path_to_local_id(b, map_param_id)
+ && cx.typeck_results().expr_ty_adjusted(a) == cx.typeck_results().expr_ty_adjusted(b)
+ })
+ && (simple_equal
+ || SpanlessEq::new(cx).expr_fallback(eq_fallback).eq_expr(receiver, map_arg_peeled))
+ {
+ Some(CheckResult::Method {
+ map_arg,
+ side_effect_expr_span,
+ method: match self {
+ OffendingFilterExpr::IsSome { .. } => CalledMethod::OptionIsSome,
+ OffendingFilterExpr::IsOk { .. } => CalledMethod::ResultIsOk,
+ OffendingFilterExpr::Matches { .. } => unreachable!("only IsSome and IsOk can get here"),
+ }
+ })
+ } else {
+ None
+ }
+ },
+ OffendingFilterExpr::Matches { variant_def_id } => {
+ let expr_uses_local = |pat: &Pat<'_>, expr: &Expr<'_>| {
+ if let PatKind::TupleStruct(QPath::Resolved(_, path), [subpat], _) = pat.kind
+ && let PatKind::Binding(_, local_id, ident, _) = subpat.kind
+ && path_to_local_id(expr.peel_blocks(), local_id)
+ && let Some(local_variant_def_id) = path.res.opt_def_id()
+ && local_variant_def_id == variant_def_id
+ {
+ Some((ident, pat.span))
+ } else {
+ None
+ }
+ };
+
+ // look for:
+ // `if let Variant (v) = enum { v } else { unreachable!() }`
+ // ^^^^^^^ ^ ^^^^ ^^^^^^^^^^^^^^^^^^
+ // variant_span variant_ident scrutinee else_ (blocks peeled later)
+ // OR
+ // `match enum { Variant (v) => v, _ => unreachable!() }`
+ // ^^^^ ^^^^^^^ ^ ^^^^^^^^^^^^^^
+ // scrutinee variant_span variant_ident else_
+ let (scrutinee, else_, variant_ident, variant_span) =
+ match higher::IfLetOrMatch::parse(cx, map_body.value) {
+ // For `if let` we want to check that the variant matching arm references the local created by its pattern
+ Some(higher::IfLetOrMatch::IfLet(sc, pat, then, Some(else_)))
+ if let Some((ident, span)) = expr_uses_local(pat, then) =>
+ {
+ (sc, else_, ident, span)
+ },
+ // For `match` we want to check that the "else" arm is the wildcard (`_`) pattern
+ // and that the variant matching arm references the local created by its pattern
+ Some(higher::IfLetOrMatch::Match(sc, [arm, wild_arm], MatchSource::Normal))
+ if let PatKind::Wild = wild_arm.pat.kind
+ && let Some((ident, span)) = expr_uses_local(arm.pat, arm.body.peel_blocks()) =>
+ {
+ (sc, wild_arm.body, ident, span)
+ },
+ _ => return None,
+ };
+
+ if path_to_local_id(scrutinee, map_param_id)
+ // else branch should be a `panic!` or `unreachable!` macro call
+ && let Some(mac) = root_macro_call(else_.peel_blocks().span)
+ && (is_panic(cx, mac.def_id) || cx.tcx.opt_item_name(mac.def_id) == Some(sym::unreachable))
+ {
+ Some(CheckResult::PatternMatching { variant_span, variant_ident })
+ } else {
+ None
+ }
+ },
+ }
+ }
+
+ fn hir(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, filter_param_id: HirId) -> Option<Self> {
+ if let ExprKind::MethodCall(path, receiver, [], _) = expr.kind
+ && let Some(recv_ty) = cx.typeck_results().expr_ty(receiver).peel_refs().ty_adt_def()
+ {
+ // we still want to lint if the expression possibly contains side effects,
+ // *but* it can't be machine-applicable then, because that can change the behavior of the program:
+ // .filter(|x| effect(x).is_some()).map(|x| effect(x).unwrap())
+ // vs.
+ // .filter_map(|x| effect(x))
+ //
+ // the latter only calls `effect` once
+ let side_effect_expr_span = receiver.can_have_side_effects().then_some(receiver.span);
+
+ if cx.tcx.is_diagnostic_item(sym::Option, recv_ty.did())
+ && path.ident.name == sym!(is_some)
+ {
+ Some(Self::IsSome { receiver, side_effect_expr_span })
+ } else if cx.tcx.is_diagnostic_item(sym::Result, recv_ty.did())
+ && path.ident.name == sym!(is_ok)
+ {
+ Some(Self::IsOk { receiver, side_effect_expr_span })
+ } else {
+ None
+ }
+ } else if let Some(macro_call) = root_macro_call(expr.span)
+ && cx.tcx.get_diagnostic_name(macro_call.def_id) == Some(sym::matches_macro)
+ // we know for a fact that the wildcard pattern is the second arm
+ && let ExprKind::Match(scrutinee, [arm, _], _) = expr.kind
+ && path_to_local_id(scrutinee, filter_param_id)
+ && let PatKind::TupleStruct(QPath::Resolved(_, path), ..) = arm.pat.kind
+ && let Some(variant_def_id) = path.res.opt_def_id()
+ {
+ Some(OffendingFilterExpr::Matches { variant_def_id })
+ } else {
+ None
+ }
+ }
+}
+
/// is `filter(|x| x.is_some()).map(|x| x.unwrap())`
fn is_filter_some_map_unwrap(
cx: &LateContext<'_>,
@@ -104,55 +312,18 @@ pub(super) fn check(
} else {
(filter_param.pat, false)
};
- // closure ends with is_some() or is_ok()
+
if let PatKind::Binding(_, filter_param_id, _, None) = filter_pat.kind;
- if let ExprKind::MethodCall(path, filter_arg, [], _) = filter_body.value.kind;
- if let Some(opt_ty) = cx.typeck_results().expr_ty(filter_arg).peel_refs().ty_adt_def();
- if let Some(is_result) = if cx.tcx.is_diagnostic_item(sym::Option, opt_ty.did()) {
- Some(false)
- } else if cx.tcx.is_diagnostic_item(sym::Result, opt_ty.did()) {
- Some(true)
- } else {
- None
- };
- if path.ident.name.as_str() == if is_result { "is_ok" } else { "is_some" };
+ if let Some(mut offending_expr) = OffendingFilterExpr::hir(cx, filter_body.value, filter_param_id);
- // ...map(|x| ...unwrap())
if let ExprKind::Closure(&Closure { body: map_body_id, .. }) = map_arg.kind;
let map_body = cx.tcx.hir().body(map_body_id);
if let [map_param] = map_body.params;
if let PatKind::Binding(_, map_param_id, map_param_ident, None) = map_param.pat.kind;
- // closure ends with expect() or unwrap()
- if let ExprKind::MethodCall(seg, map_arg, ..) = map_body.value.kind;
- if matches!(seg.ident.name, sym::expect | sym::unwrap | sym::unwrap_or);
-
- // .filter(..).map(|y| f(y).copied().unwrap())
- // ~~~~
- let map_arg_peeled = match map_arg.kind {
- ExprKind::MethodCall(method, original_arg, [], _) if acceptable_methods(method) => {
- original_arg
- },
- _ => map_arg,
- };
- // .filter(|x| x.is_some()).map(|y| y[.acceptable_method()].unwrap())
- let simple_equal = path_to_local_id(filter_arg, filter_param_id)
- && path_to_local_id(map_arg_peeled, map_param_id);
+ if let Some(check_result) =
+ offending_expr.check_map_call(cx, map_body, map_param_id, filter_param_id, is_filter_param_ref);
- let eq_fallback = |a: &Expr<'_>, b: &Expr<'_>| {
- // in `filter(|x| ..)`, replace `*x` with `x`
- let a_path = if_chain! {
- if !is_filter_param_ref;
- if let ExprKind::Unary(UnOp::Deref, expr_path) = a.kind;
- then { expr_path } else { a }
- };
- // let the filter closure arg and the map closure arg be equal
- path_to_local_id(a_path, filter_param_id)
- && path_to_local_id(b, map_param_id)
- && cx.typeck_results().expr_ty_adjusted(a) == cx.typeck_results().expr_ty_adjusted(b)
- };
-
- if simple_equal || SpanlessEq::new(cx).expr_fallback(eq_fallback).eq_expr(filter_arg, map_arg_peeled);
then {
let span = filter_span.with_hi(expr.span.hi());
let (filter_name, lint) = if is_find {
@@ -161,22 +332,53 @@ pub(super) fn check(
("filter", MANUAL_FILTER_MAP)
};
let msg = format!("`{filter_name}(..).map(..)` can be simplified as `{filter_name}_map(..)`");
- let (to_opt, deref) = if is_result {
- (".ok()", String::new())
- } else {
- let derefs = cx.typeck_results()
- .expr_adjustments(map_arg)
- .iter()
- .filter(|adj| matches!(adj.kind, Adjust::Deref(_)))
- .count();
- ("", "*".repeat(derefs))
+ let (sugg, note_and_span, applicability) = match check_result {
+ CheckResult::Method { map_arg, method, side_effect_expr_span } => {
+ let (to_opt, deref) = match method {
+ CalledMethod::ResultIsOk => (".ok()", String::new()),
+ CalledMethod::OptionIsSome => {
+ let derefs = cx.typeck_results()
+ .expr_adjustments(map_arg)
+ .iter()
+ .filter(|adj| matches!(adj.kind, Adjust::Deref(_)))
+ .count();
+
+ ("", "*".repeat(derefs))
+ }
+ };
+
+ let sugg = format!(
+ "{filter_name}_map(|{map_param_ident}| {deref}{}{to_opt})",
+ snippet(cx, map_arg.span, ".."),
+ );
+ let (note_and_span, applicability) = if let Some(span) = side_effect_expr_span {
+ let note = "the suggestion might change the behavior of the program when merging `filter` and `map`, \
+ because this expression potentially contains side effects and will only execute once";
+
+ (Some((note, span)), Applicability::MaybeIncorrect)
+ } else {
+ (None, Applicability::MachineApplicable)
+ };
+
+ (sugg, note_and_span, applicability)
+ }
+ CheckResult::PatternMatching { variant_span, variant_ident } => {
+ let pat = snippet(cx, variant_span, "<pattern>");
+
+ (format!("{filter_name}_map(|{map_param_ident}| match {map_param_ident} {{ \
+ {pat} => Some({variant_ident}), \
+ _ => None \
+ }})"), None, Applicability::MachineApplicable)
+ }
};
- let sugg = format!(
- "{filter_name}_map(|{map_param_ident}| {deref}{}{to_opt})",
- snippet(cx, map_arg.span, ".."),
- );
- span_lint_and_sugg(cx, lint, span, &msg, "try", sugg, Applicability::MachineApplicable);
+ span_lint_and_then(cx, lint, span, &msg, |diag| {
+ diag.span_suggestion(span, "try", sugg, applicability);
+
+ if let Some((note, span)) = note_and_span {
+ diag.span_note(span, note);
+ }
+ });
}
}
}
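
As an illustration of the new `OffendingFilterExpr::Matches` arm above, `manual_filter_map`/`manual_find_map` now also cover a `matches!` filter whose `map` closure re-matches the same variant and panics on the "impossible" arm. A hedged before/after sketch (the enum and bindings are made up for the example, not taken from the patch):

    enum Foo { A(i32), B }

    fn before(v: Vec<Foo>) -> Vec<i32> {
        // filter checks the variant, map re-matches it and panics on the other arm
        v.into_iter()
            .filter(|f| matches!(f, Foo::A(_)))
            .map(|f| match f { Foo::A(x) => x, _ => unreachable!() })
            .collect()
    }

    fn after(v: Vec<Foo>) -> Vec<i32> {
        // shape of the suggested rewrite: keep the matching variant, drop the panic arm
        v.into_iter()
            .filter_map(|f| match f { Foo::A(x) => Some(x), _ => None })
            .collect()
    }
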
diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs
new file mode 100644
index 000000000..fafc97097
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs
@@ -0,0 +1,53 @@
+use super::FILTER_MAP_BOOL_THEN;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::paths::BOOL_THEN;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::is_copy;
+use clippy_utils::{is_from_proc_macro, is_trait_method, match_def_path, peel_blocks};
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind};
+use rustc_lint::{LateContext, LintContext};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::Binder;
+use rustc_span::{sym, Span};
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, arg: &Expr<'_>, call_span: Span) {
+ if !in_external_macro(cx.sess(), expr.span)
+ && is_trait_method(cx, expr, sym::Iterator)
+ && let ExprKind::Closure(closure) = arg.kind
+ && let body = cx.tcx.hir().body(closure.body)
+ && let value = peel_blocks(body.value)
+ // Indexing should be fine as `filter_map` always has 1 input, we unfortunately need both
+ // `inputs` and `params` here as we need both the type and the span
+ && let param_ty = closure.fn_decl.inputs[0]
+ && let param = body.params[0]
+ // Issue #11309
+ && let param_ty = cx.tcx.liberate_late_bound_regions(
+ closure.def_id.to_def_id(),
+ Binder::bind_with_vars(
+ cx.typeck_results().node_type(param_ty.hir_id),
+ cx.tcx.late_bound_vars(cx.tcx.hir().local_def_id_to_hir_id(closure.def_id)),
+ ),
+ )
+ && is_copy(cx, param_ty)
+ && let ExprKind::MethodCall(_, recv, [then_arg], _) = value.kind
+ && let ExprKind::Closure(then_closure) = then_arg.kind
+ && let then_body = peel_blocks(cx.tcx.hir().body(then_closure.body).value)
+ && let Some(def_id) = cx.typeck_results().type_dependent_def_id(value.hir_id)
+ && match_def_path(cx, def_id, &BOOL_THEN)
+ && !is_from_proc_macro(cx, expr)
+ && let Some(param_snippet) = snippet_opt(cx, param.span)
+ && let Some(filter) = snippet_opt(cx, recv.span)
+ && let Some(map) = snippet_opt(cx, then_body.span)
+ {
+ span_lint_and_sugg(
+ cx,
+ FILTER_MAP_BOOL_THEN,
+ call_span,
+ "usage of `bool::then` in `filter_map`",
+ "use `filter` then `map` instead",
+ format!("filter(|&{param_snippet}| {filter}).map(|{param_snippet}| {map})"),
+ Applicability::MachineApplicable,
+ );
+ }
+}
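
For a concrete, hand-written input (illustrative only, not from the patch), the `format!` suggestion above splits the `bool::then` call into `filter` followed by `map`; the `&` in the generated `filter` closure is presumably why the closure parameter type is checked with `is_copy` earlier:

    fn is_even(i: i32) -> bool { i % 2 == 0 }
    fn double(i: i32) -> i32 { i * 2 }

    fn before(v: Vec<i32>) -> Vec<i32> {
        v.into_iter().filter_map(|i| is_even(i).then(|| double(i))).collect()
    }

    // roughly what the machine-applicable suggestion produces
    fn after(v: Vec<i32>) -> Vec<i32> {
        v.into_iter().filter(|&i| is_even(i)).map(|i| double(i)).collect()
    }
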
diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs
index d1b5e945d..3337b250c 100644
--- a/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs
@@ -3,7 +3,8 @@ use clippy_utils::{is_expr_identity_function, is_trait_method};
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
-use rustc_span::{source_map::Span, sym};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
use super::FILTER_MAP_IDENTITY;
diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map_next.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map_next.rs
index 175e04f8a..3f89e5931 100644
--- a/src/tools/clippy/clippy_lints/src/methods/filter_map_next.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/filter_map_next.rs
@@ -31,7 +31,7 @@ pub(super) fn check<'tcx>(
FILTER_MAP_NEXT,
expr.span,
msg,
- "try this",
+ "try",
format!("{iter_snippet}.find_map({filter_snippet})"),
Applicability::MachineApplicable,
);
diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_next.rs b/src/tools/clippy/clippy_lints/src/methods/filter_next.rs
index edcec0fc1..ac7bc9bcc 100644
--- a/src/tools/clippy/clippy_lints/src/methods/filter_next.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/filter_next.rs
@@ -1,6 +1,7 @@
-use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
+use clippy_utils::diagnostics::{span_lint, span_lint_and_then};
use clippy_utils::source::snippet;
use clippy_utils::ty::implements_trait;
+use rustc_ast::{BindingAnnotation, Mutability};
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@@ -8,6 +9,21 @@ use rustc_span::sym;
use super::FILTER_NEXT;
+fn path_to_local(expr: &hir::Expr<'_>) -> Option<hir::HirId> {
+ match expr.kind {
+ hir::ExprKind::Field(f, _) => path_to_local(f),
+ hir::ExprKind::Index(recv, _, _) => path_to_local(recv),
+ hir::ExprKind::Path(hir::QPath::Resolved(
+ _,
+ hir::Path {
+ res: rustc_hir::def::Res::Local(local),
+ ..
+ },
+ )) => Some(*local),
+ _ => None,
+ }
+}
+
/// lint use of `filter().next()` for `Iterators`
pub(super) fn check<'tcx>(
cx: &LateContext<'tcx>,
@@ -26,15 +42,30 @@ pub(super) fn check<'tcx>(
if filter_snippet.lines().count() <= 1 {
let iter_snippet = snippet(cx, recv.span, "..");
// add note if not multi-line
- span_lint_and_sugg(
- cx,
- FILTER_NEXT,
- expr.span,
- msg,
- "try this",
- format!("{iter_snippet}.find({filter_snippet})"),
- Applicability::MachineApplicable,
- );
+ span_lint_and_then(cx, FILTER_NEXT, expr.span, msg, |diag| {
+ let (applicability, pat) = if let Some(id) = path_to_local(recv)
+ && let Some(hir::Node::Pat(pat)) = cx.tcx.hir().find(id)
+ && let hir::PatKind::Binding(BindingAnnotation(_, Mutability::Not), _, ident, _) = pat.kind
+ {
+ (Applicability::Unspecified, Some((pat.span, ident)))
+ } else {
+ (Applicability::MachineApplicable, None)
+ };
+
+ diag.span_suggestion(
+ expr.span,
+ "try",
+ format!("{iter_snippet}.find({filter_snippet})"),
+ applicability,
+ );
+
+ if let Some((pat_span, ident)) = pat {
+ diag.span_help(
+ pat_span,
+ format!("you will also need to make `{ident}` mutable, because `find` takes `&mut self`"),
+ );
+ }
+ });
} else {
span_lint(cx, FILTER_NEXT, expr.span, msg);
}
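
The new help note exists because `Iterator::find` takes `&mut self`, so the plain `filter(..).next()` to `find(..)` rewrite only compiles when the receiver binding is mutable; that is why the suggestion drops to `Applicability::Unspecified` when the receiver resolves to an immutable binding. An illustrative sketch (names invented for the example):

    fn first_even(v: Vec<i32>) -> Option<i32> {
        let iter = v.into_iter();            // immutable binding
        // lint fires here; applying the suggestion also needs `let mut iter = ...`
        iter.filter(|&x| x % 2 == 0).next()
        // suggested: iter.find(|&x| x % 2 == 0)
    }
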
diff --git a/src/tools/clippy/clippy_lints/src/methods/flat_map_identity.rs b/src/tools/clippy/clippy_lints/src/methods/flat_map_identity.rs
index 6f911d79d..84a21de0a 100644
--- a/src/tools/clippy/clippy_lints/src/methods/flat_map_identity.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/flat_map_identity.rs
@@ -3,7 +3,8 @@ use clippy_utils::{is_expr_identity_function, is_trait_method};
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
-use rustc_span::{source_map::Span, sym};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
use super::FLAT_MAP_IDENTITY;
diff --git a/src/tools/clippy/clippy_lints/src/methods/flat_map_option.rs b/src/tools/clippy/clippy_lints/src/methods/flat_map_option.rs
index 615bde941..172c397fb 100644
--- a/src/tools/clippy/clippy_lints/src/methods/flat_map_option.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/flat_map_option.rs
@@ -4,7 +4,8 @@ use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_middle::ty;
-use rustc_span::{source_map::Span, sym};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
use super::FLAT_MAP_OPTION;
use clippy_utils::ty::is_type_diagnostic_item;
@@ -15,7 +16,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>, arg
}
let arg_ty = cx.typeck_results().expr_ty_adjusted(arg);
let sig = match arg_ty.kind() {
- ty::Closure(_, substs) => substs.as_closure().sig(),
+ ty::Closure(_, args) => args.as_closure().sig(),
_ if arg_ty.is_fn() => arg_ty.fn_sig(cx.tcx),
_ => return,
};
diff --git a/src/tools/clippy/clippy_lints/src/methods/format_collect.rs b/src/tools/clippy/clippy_lints/src/methods/format_collect.rs
new file mode 100644
index 000000000..1f8863f85
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/format_collect.rs
@@ -0,0 +1,33 @@
+use super::FORMAT_COLLECT;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::macros::{is_format_macro, root_macro_call_first_node};
+use clippy_utils::ty::is_type_lang_item;
+use rustc_hir::{Expr, ExprKind, LangItem};
+use rustc_lint::LateContext;
+use rustc_span::Span;
+
+/// Same as `peel_blocks` but only actually considers blocks that are not from an expansion.
+/// This is needed because always calling `peel_blocks` would otherwise remove parts of the
+/// `format!` macro, which would cause `root_macro_call_first_node` to return `None`.
+fn peel_non_expn_blocks<'tcx>(expr: &'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> {
+ match expr.kind {
+ ExprKind::Block(block, _) if !expr.span.from_expansion() => peel_non_expn_blocks(block.expr?),
+ _ => Some(expr),
+ }
+}
+
+pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, map_arg: &Expr<'_>, map_span: Span) {
+ if is_type_lang_item(cx, cx.typeck_results().expr_ty(expr), LangItem::String)
+ && let ExprKind::Closure(closure) = map_arg.kind
+ && let body = cx.tcx.hir().body(closure.body)
+ && let Some(value) = peel_non_expn_blocks(body.value)
+ && let Some(mac) = root_macro_call_first_node(cx, value)
+ && is_format_macro(cx, mac.def_id)
+ {
+ span_lint_and_then(cx, FORMAT_COLLECT, expr.span, "use of `format!` to build up a string from an iterator", |diag| {
+ diag.span_help(map_span, "call `fold` instead")
+ .span_help(value.span.source_callsite(), "... and use the `write!` macro here")
+ .note("this can be written more efficiently by appending to a `String` directly");
+ });
+ }
+}
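
The `peel_non_expn_blocks` helper above matters when the user wraps the `format!` call in their own block: the user-written block is peeled so `root_macro_call_first_node` still sees the macro, while the blocks produced by the `format!` expansion itself are left alone. A small assumed case (not taken from the patch) that should still trigger the lint:

    fn render(items: &[u32]) -> String {
        // the user-written block around format! is peeled, the expansion's blocks are not
        items.iter().map(|n| { format!("<{n}>") }).collect()
    }
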
diff --git a/src/tools/clippy/clippy_lints/src/methods/get_first.rs b/src/tools/clippy/clippy_lints/src/methods/get_first.rs
index 945bbf53b..ee063adac 100644
--- a/src/tools/clippy/clippy_lints/src/methods/get_first.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/get_first.rs
@@ -19,7 +19,7 @@ pub(super) fn check<'tcx>(
if_chain! {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- if cx.tcx.type_of(impl_id).subst_identity().is_slice();
+ if cx.tcx.type_of(impl_id).instantiate_identity().is_slice();
if let Some(_) = is_slice_of_primitives(cx, recv);
if let hir::ExprKind::Lit(Spanned { node: LitKind::Int(0, _), .. }) = arg.kind;
then {
diff --git a/src/tools/clippy/clippy_lints/src/methods/get_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/get_unwrap.rs
index e35fb12ed..a8f090d1d 100644
--- a/src/tools/clippy/clippy_lints/src/methods/get_unwrap.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/get_unwrap.rs
@@ -71,7 +71,7 @@ pub(super) fn check<'tcx>(
GET_UNWRAP,
span,
&format!("called `.get{mut_str}().unwrap()` on a {caller_type}. Using `[]` is more clear and more concise"),
- "try this",
+ "try",
format!(
"{borrow_str}{}[{get_args_str}]",
snippet_with_applicability(cx, recv.span, "..", &mut applicability)
diff --git a/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs b/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs
index 5a78a4168..043425300 100644
--- a/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs
@@ -54,7 +54,7 @@ pub fn is_clone_like(cx: &LateContext<'_>, method_name: &str, method_def_id: hir
.tcx
.impl_of_method(method_def_id)
.filter(|&impl_did| {
- cx.tcx.type_of(impl_did).subst_identity().is_slice() && cx.tcx.impl_trait_ref(impl_did).is_none()
+ cx.tcx.type_of(impl_did).instantiate_identity().is_slice() && cx.tcx.impl_trait_ref(impl_did).is_none()
})
.is_some(),
_ => false,
diff --git a/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs b/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs
index 424482859..631741d92 100644
--- a/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs
@@ -7,7 +7,7 @@ use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};
-use rustc_span::symbol::{Symbol, sym};
+use rustc_span::symbol::{sym, Symbol};
use super::INEFFICIENT_TO_STRING;
@@ -23,9 +23,9 @@ pub fn check(
if args.is_empty() && method_name == sym::to_string;
if let Some(to_string_meth_did) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
if match_def_path(cx, to_string_meth_did, &paths::TO_STRING_METHOD);
- if let Some(substs) = cx.typeck_results().node_substs_opt(expr.hir_id);
+ if let Some(args) = cx.typeck_results().node_args_opt(expr.hir_id);
let arg_ty = cx.typeck_results().expr_ty_adjusted(receiver);
- let self_ty = substs.type_at(0);
+ let self_ty = args.type_at(0);
let (deref_self_ty, deref_count) = walk_ptrs_ty_depth(self_ty);
if deref_count >= 1;
if specializes_tostring(cx, deref_self_ty);
@@ -64,8 +64,8 @@ fn specializes_tostring(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
return true;
}
- if let ty::Adt(adt, substs) = ty.kind() {
- cx.tcx.is_diagnostic_item(sym::Cow, adt.did()) && substs.type_at(1).is_str()
+ if let ty::Adt(adt, args) = ty.kind() {
+ cx.tcx.is_diagnostic_item(sym::Cow, adt.did()) && args.type_at(1).is_str()
} else {
false
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/inspect_for_each.rs b/src/tools/clippy/clippy_lints/src/methods/inspect_for_each.rs
index 7fd3ef1a6..23cc192c3 100644
--- a/src/tools/clippy/clippy_lints/src/methods/inspect_for_each.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/inspect_for_each.rs
@@ -2,7 +2,8 @@ use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::is_trait_method;
use rustc_hir as hir;
use rustc_lint::LateContext;
-use rustc_span::{source_map::Span, sym};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
use super::INSPECT_FOR_EACH;
diff --git a/src/tools/clippy/clippy_lints/src/methods/is_digit_ascii_radix.rs b/src/tools/clippy/clippy_lints/src/methods/is_digit_ascii_radix.rs
index 301aff5ae..120f3d5f4 100644
--- a/src/tools/clippy/clippy_lints/src/methods/is_digit_ascii_radix.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/is_digit_ascii_radix.rs
@@ -1,10 +1,10 @@
//! Lint for `c.is_digit(10)`
use super::IS_DIGIT_ASCII_RADIX;
+use clippy_utils::consts::{constant_full_int, FullInt};
+use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::msrvs::{self, Msrv};
-use clippy_utils::{
- consts::constant_full_int, consts::FullInt, diagnostics::span_lint_and_sugg, source::snippet_with_applicability,
-};
+use clippy_utils::source::snippet_with_applicability;
use rustc_errors::Applicability;
use rustc_hir::Expr;
use rustc_lint::LateContext;
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs b/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs
index c87f5daab..674d34517 100644
--- a/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs
@@ -9,8 +9,7 @@ use clippy_utils::visitors::is_local_used;
use rustc_hir::{BindingAnnotation, Body, BorrowKind, ByRef, Expr, ExprKind, Mutability, Pat, PatKind};
use rustc_lint::{LateContext, LintContext};
use rustc_middle::ty;
-use rustc_span::sym;
-use rustc_span::Span;
+use rustc_span::{sym, Span};
/// lint use of:
/// - `hashmap.iter().map(|(_, v)| v)`
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs b/src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs
index e2029da80..8f885e9f7 100644
--- a/src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_next_slice.rs
@@ -27,7 +27,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>, cal
if derefs_to_slice(cx, caller_expr, cx.typeck_results().expr_ty(caller_expr)).is_some() {
// caller is a Slice
if_chain! {
- if let hir::ExprKind::Index(caller_var, index_expr) = &caller_expr.kind;
+ if let hir::ExprKind::Index(caller_var, index_expr, _) = &caller_expr.kind;
if let Some(higher::Range { start: Some(start_expr), end: None, limits: ast::RangeLimits::HalfOpen })
= higher::Range::hir(index_expr);
if let hir::ExprKind::Lit(start_lit) = &start_expr.kind;
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs b/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs
index b4210d875..9f7ec19aa 100644
--- a/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs
@@ -51,7 +51,7 @@ pub(super) fn check<'tcx>(
if let Some(mut snip) = snippet_opt(cx, method_span) {
snip.push_str(trailing_clone);
let replace_span = expr.span.with_lo(cloned_recv.span.hi());
- diag.span_suggestion(replace_span, "try this", snip, Applicability::MachineApplicable);
+ diag.span_suggestion(replace_span, "try", snip, Applicability::MachineApplicable);
}
}
);
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs b/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs
index 279175e20..39af52141 100644
--- a/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_then;
-use clippy_utils::is_trait_method;
-use clippy_utils::path_to_local;
use clippy_utils::source::snippet;
+use clippy_utils::{is_trait_method, path_to_local};
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::{BindingAnnotation, Node, PatKind};
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_skip_zero.rs b/src/tools/clippy/clippy_lints/src/methods/iter_skip_zero.rs
new file mode 100644
index 000000000..6b696b42a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_skip_zero.rs
@@ -0,0 +1,34 @@
+use clippy_utils::consts::{constant, Constant};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::{is_from_proc_macro, is_trait_method};
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::ITER_SKIP_ZERO;
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, arg_expr: &Expr<'_>) {
+ if !expr.span.from_expansion()
+ && is_trait_method(cx, expr, sym::Iterator)
+ && let Some(arg) = constant(cx, cx.typeck_results(), arg_expr).and_then(|constant| {
+ if let Constant::Int(arg) = constant {
+ Some(arg)
+ } else {
+ None
+ }
+ })
+ && arg == 0
+ && !is_from_proc_macro(cx, expr)
+ {
+ span_lint_and_then(cx, ITER_SKIP_ZERO, arg_expr.span, "usage of `.skip(0)`", |diag| {
+ diag.span_suggestion(
+ arg_expr.span,
+ "if you meant to skip the first element, use",
+ "1",
+ Applicability::MaybeIncorrect,
+ )
+ .note("this call to `skip` does nothing and is useless; remove it");
+ });
+ }
+}
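
Since the argument is run through `constant(..)` rather than checked for a literal `0`, a zero that only appears after constant evaluation should be caught as well. An illustrative sketch resting on that assumption (not verified against the test suite):

    const OFFSET: usize = 0;

    fn ids(v: &[u32]) -> Vec<u32> {
        // both calls skip nothing: the literal and the const evaluate to the same value
        let _with_literal: Vec<u32> = v.iter().skip(0).copied().collect();
        v.iter().skip(OFFSET).copied().collect()
    }
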
diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs b/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs
index f6772c5c6..2ab721ace 100644
--- a/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs
@@ -21,7 +21,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, span
ITER_WITH_DRAIN,
span.with_hi(expr.span.hi()),
&format!("`drain(..)` used on a `{ty_name}`"),
- "try this",
+ "try",
"into_iter()".to_string(),
Applicability::MaybeIncorrect,
);
diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs b/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs
index b9a0ec779..3031193e5 100644
--- a/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs
@@ -21,7 +21,7 @@ pub(super) fn check<'tcx>(
if_chain! {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- if is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).subst_identity(), sym::Option);
+ if is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Option);
if let ExprKind::Call(err_path, [err_arg]) = or_expr.kind;
if is_res_lang_ctor(cx, path_res(cx, err_path), ResultErr);
if is_ok_wrapping(cx, map_expr);
diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs b/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs
index a7284c644..540425eef 100644
--- a/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs
@@ -21,13 +21,13 @@ pub fn check(
return;
}
- let Some(mm) = is_min_or_max(cx, unwrap_arg) else { return };
+ let Some(mm) = is_min_or_max(cx, unwrap_arg) else {
+ return;
+ };
if ty.is_signed() {
- use self::{
- MinMax::{Max, Min},
- Sign::{Neg, Pos},
- };
+ use self::MinMax::{Max, Min};
+ use self::Sign::{Neg, Pos};
let Some(sign) = lit_sign(arith_rhs) else {
return;
diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs
index a08f72540..ab13d30d8 100644
--- a/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs
@@ -88,7 +88,7 @@ pub(super) fn check(
MANUAL_STR_REPEAT,
collect_expr.span,
"manual implementation of `str::repeat` using iterators",
- "try this",
+ "try",
format!("{val_str}.repeat({count_snip})"),
app
)
diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs b/src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs
index 576a58499..dabed0aff 100644
--- a/src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs
@@ -1,15 +1,11 @@
-use clippy_utils::{
- diagnostics::span_lint_and_sugg,
- is_from_proc_macro,
- msrvs::{Msrv, ITERATOR_TRY_FOLD},
- source::snippet_opt,
- ty::implements_trait,
-};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::is_from_proc_macro;
+use clippy_utils::msrvs::{Msrv, ITERATOR_TRY_FOLD};
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::implements_trait;
use rustc_errors::Applicability;
-use rustc_hir::{
- def::{DefKind, Res},
- Expr, ExprKind,
-};
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_span::Span;
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_clone.rs b/src/tools/clippy/clippy_lints/src/methods/map_clone.rs
index 2b26ef014..880efe60c 100644
--- a/src/tools/clippy/clippy_lints/src/methods/map_clone.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/map_clone.rs
@@ -19,7 +19,7 @@ pub(super) fn check(cx: &LateContext<'_>, e: &hir::Expr<'_>, recv: &hir::Expr<'_
if_chain! {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id);
if cx.tcx.impl_of_method(method_id)
- .map_or(false, |id| is_type_diagnostic_item(cx, cx.tcx.type_of(id).subst_identity(), sym::Option))
+ .map_or(false, |id| is_type_diagnostic_item(cx, cx.tcx.type_of(id).instantiate_identity(), sym::Option))
|| is_diag_trait_item(cx, method_id, sym::Iterator);
if let hir::ExprKind::Closure(&hir::Closure{ body, .. }) = arg.kind;
then {
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs b/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs
index a0300d278..01cdd02e6 100644
--- a/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs
@@ -15,8 +15,8 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, iter: &hir::Expr
let collect_ret_ty = cx.typeck_results().expr_ty(expr);
if_chain! {
if is_type_diagnostic_item(cx, collect_ret_ty, sym::Result);
- if let ty::Adt(_, substs) = collect_ret_ty.kind();
- if let Some(result_t) = substs.types().next();
+ if let ty::Adt(_, args) = collect_ret_ty.kind();
+ if let Some(result_t) = args.types().next();
if result_t.is_unit();
// get parts for snippet
then {
@@ -25,7 +25,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, iter: &hir::Expr
MAP_COLLECT_RESULT_UNIT,
expr.span,
"`.map().collect()` can be replaced with `.try_for_each()`",
- "try this",
+ "try",
format!(
"{}.try_for_each({})",
snippet(cx, iter.span, ".."),
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_err_ignore.rs b/src/tools/clippy/clippy_lints/src/methods/map_err_ignore.rs
index a5beb291f..fbb83c8ce 100644
--- a/src/tools/clippy/clippy_lints/src/methods/map_err_ignore.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/map_err_ignore.rs
@@ -9,7 +9,7 @@ use super::MAP_ERR_IGNORE;
pub(super) fn check(cx: &LateContext<'_>, e: &Expr<'_>, arg: &Expr<'_>) {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
- && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).subst_identity(), sym::Result)
+ && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Result)
&& let ExprKind::Closure(&Closure {
capture_clause: CaptureBy::Ref,
body,
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs b/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs
index 361ffcb5e..e74a76455 100644
--- a/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs
@@ -6,7 +6,8 @@ use rustc_errors::Applicability;
use rustc_hir::Expr;
use rustc_lint::LateContext;
use rustc_middle::ty;
-use rustc_span::{symbol::sym, Span};
+use rustc_span::symbol::sym;
+use rustc_span::Span;
use super::MAP_FLATTEN;
@@ -59,7 +60,7 @@ fn is_map_to_option(cx: &LateContext<'_>, map_arg: &Expr<'_>) -> bool {
match map_closure_ty.kind() {
ty::Closure(_, _) | ty::FnDef(_, _) | ty::FnPtr(_) => {
let map_closure_sig = match map_closure_ty.kind() {
- ty::Closure(_, substs) => substs.as_closure().sig(),
+ ty::Closure(_, args) => args.as_closure().sig(),
_ => map_closure_ty.fn_sig(cx.tcx),
};
let map_closure_return_ty = cx.tcx.erase_late_bound_regions(map_closure_sig.output());
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_identity.rs b/src/tools/clippy/clippy_lints/src/methods/map_identity.rs
index 0f25ef82e..7be1ce483 100644
--- a/src/tools/clippy/clippy_lints/src/methods/map_identity.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/map_identity.rs
@@ -4,7 +4,8 @@ use clippy_utils::{is_expr_identity_function, is_trait_method};
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
-use rustc_span::{source_map::Span, sym};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
use super::MAP_IDENTITY;
diff --git a/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs b/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs
index 3122f72ee..5464e455d 100644
--- a/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs
@@ -11,7 +11,8 @@ use rustc_span::symbol::sym;
use super::MAP_UNWRAP_OR;
/// lint use of `map().unwrap_or_else()` for `Option`s and `Result`s
-/// Return true if lint triggered
+///
+/// Returns true if the lint was emitted
pub(super) fn check<'tcx>(
cx: &LateContext<'tcx>,
expr: &'tcx hir::Expr<'_>,
@@ -63,7 +64,7 @@ pub(super) fn check<'tcx>(
MAP_UNWRAP_OR,
expr.span,
msg,
- "try this",
+ "try",
format!("{var_snippet}.map_or_else({unwrap_snippet}, {map_snippet})"),
Applicability::MachineApplicable,
);
diff --git a/src/tools/clippy/clippy_lints/src/methods/mod.rs b/src/tools/clippy/clippy_lints/src/methods/mod.rs
index 24dbe8c1d..42756b27d 100644
--- a/src/tools/clippy/clippy_lints/src/methods/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/mod.rs
@@ -17,15 +17,16 @@ mod collapsible_str_replace;
mod drain_collect;
mod err_expect;
mod expect_fun_call;
-mod expect_used;
mod extend_with_drain;
mod filetype_is_file;
mod filter_map;
+mod filter_map_bool_then;
mod filter_map_identity;
mod filter_map_next;
mod filter_next;
mod flat_map_identity;
mod flat_map_option;
+mod format_collect;
mod from_iter_instead_of_collect;
mod get_first;
mod get_last_with_len;
@@ -44,6 +45,7 @@ mod iter_nth_zero;
mod iter_on_single_or_empty_collections;
mod iter_overeager_cloned;
mod iter_skip_next;
+mod iter_skip_zero;
mod iter_with_drain;
mod iterator_step_by_zero;
mod manual_next_back;
@@ -72,6 +74,8 @@ mod or_fun_call;
mod or_then_unwrap;
mod path_buf_push_overwrite;
mod range_zip_with_len;
+mod read_line_without_trim;
+mod readonly_write_lock;
mod repeat_once;
mod search_is_some;
mod seek_from_current;
@@ -84,10 +88,12 @@ mod skip_while_next;
mod stable_sort_primitive;
mod str_splitn;
mod string_extend_chars;
+mod string_lit_chars_any;
mod suspicious_command_arg_space;
mod suspicious_map;
mod suspicious_splitn;
mod suspicious_to_owned;
+mod type_id_on_box;
mod uninit_assumed_init;
mod unit_hash;
mod unnecessary_filter_map;
@@ -98,8 +104,7 @@ mod unnecessary_lazy_eval;
mod unnecessary_literal_unwrap;
mod unnecessary_sort_by;
mod unnecessary_to_owned;
-mod unwrap_or_else_default;
-mod unwrap_used;
+mod unwrap_expect_used;
mod useless_asref;
mod utils;
mod vec_resize_to_zero;
@@ -112,7 +117,7 @@ use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
use clippy_utils::msrvs::{self, Msrv};
use clippy_utils::ty::{contains_ty_adt_constructor_opaque, implements_trait, is_copy, is_type_diagnostic_item};
-use clippy_utils::{contains_return, is_bool, is_trait_method, iter_input_pats, return_ty};
+use clippy_utils::{contains_return, is_bool, is_trait_method, iter_input_pats, peel_blocks, return_ty};
use if_chain::if_chain;
use rustc_hir as hir;
use rustc_hir::{Expr, ExprKind, Node, Stmt, StmtKind, TraitItem, TraitItemKind};
@@ -471,29 +476,40 @@ declare_clippy_lint! {
declare_clippy_lint! {
/// ### What it does
- /// Checks for usage of `_.unwrap_or_else(Default::default)` on `Option` and
- /// `Result` values.
+ /// Checks for usages of the following functions with an argument that constructs a default value
+ /// (e.g., `Default::default` or `String::new`):
+ /// - `unwrap_or`
+ /// - `unwrap_or_else`
+ /// - `or_insert`
+ /// - `or_insert_with`
///
/// ### Why is this bad?
- /// Readability, these can be written as `_.unwrap_or_default`, which is
- /// simpler and more concise.
+ /// Readability. Using `unwrap_or_default` in place of `unwrap_or`/`unwrap_or_else`, or `or_default`
+ /// in place of `or_insert`/`or_insert_with`, is simpler and more concise.
+ ///
+ /// ### Known problems
+ /// In some cases, the argument of `unwrap_or`, etc. is needed for type inference. The lint uses a
+ /// heuristic to try to identify such cases. However, the heuristic can produce false negatives.
///
/// ### Examples
/// ```rust
/// # let x = Some(1);
- /// x.unwrap_or_else(Default::default);
- /// x.unwrap_or_else(u32::default);
+ /// # let mut map = std::collections::HashMap::<u64, String>::new();
+ /// x.unwrap_or(Default::default());
+ /// map.entry(42).or_insert_with(String::new);
/// ```
///
/// Use instead:
/// ```rust
/// # let x = Some(1);
+ /// # let mut map = std::collections::HashMap::<u64, String>::new();
/// x.unwrap_or_default();
+ /// map.entry(42).or_default();
/// ```
#[clippy::version = "1.56.0"]
- pub UNWRAP_OR_ELSE_DEFAULT,
+ pub UNWRAP_OR_DEFAULT,
style,
- "using `.unwrap_or_else(Default::default)`, which is more succinctly expressed as `.unwrap_or_default()`"
+ "using `.unwrap_or`, etc. with an argument that constructs a default value"
}
declare_clippy_lint! {
@@ -2927,6 +2943,37 @@ declare_clippy_lint! {
declare_clippy_lint! {
/// ### What it does
+ /// Looks for calls to `<Box<dyn Any> as Any>::type_id`.
+ ///
+ /// ### Why is this bad?
+ /// This most certainly does not do what the user expects and is very easy to miss.
+ /// Calling `type_id` on a `Box<dyn Any>` calls `type_id` on the `Box<..>` itself,
+ /// so this will return the `TypeId` of the `Box<dyn Any>` type (not the type id
+ /// of the value referenced by the box!).
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// use std::any::{Any, TypeId};
+ ///
+ /// let any_box: Box<dyn Any> = Box::new(42_i32);
+ /// assert_eq!(any_box.type_id(), TypeId::of::<i32>()); // ⚠️ this fails!
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// use std::any::{Any, TypeId};
+ ///
+ /// let any_box: Box<dyn Any> = Box::new(42_i32);
+ /// assert_eq!((*any_box).type_id(), TypeId::of::<i32>());
+ /// // ^ dereference first, to call `type_id` on `dyn Any`
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub TYPE_ID_ON_BOX,
+ suspicious,
+ "calling `.type_id()` on `Box<dyn Any>`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
/// Detects `().hash(_)`.
///
/// ### Why is this bad?
@@ -3316,6 +3363,181 @@ declare_clippy_lint! {
"checks for usage of `Iterator::fold` with a type that implements `Try`"
}
+declare_clippy_lint! {
+ /// ### What it does
+ /// Looks for calls to [`Stdin::read_line`] that read a line from the standard input
+ /// into a string, followed by an attempt to parse that string into a type without first trimming it,
+ /// which will always fail because the string has a trailing newline in it.
+ ///
+ /// ### Why is this bad?
+ /// The `.parse()` call will always fail.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// let mut input = String::new();
+ /// std::io::stdin().read_line(&mut input).expect("Failed to read a line");
+ /// let num: i32 = input.parse().expect("Not a number!");
+ /// assert_eq!(num, 42); // we never even get here!
+ /// ```
+ /// Use instead:
+ /// ```rust,ignore
+ /// let mut input = String::new();
+ /// std::io::stdin().read_line(&mut input).expect("Failed to read a line");
+ /// let num: i32 = input.trim_end().parse().expect("Not a number!");
+ /// // ^^^^^^^^^^^ remove the trailing newline
+ /// assert_eq!(num, 42);
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub READ_LINE_WITHOUT_TRIM,
+ correctness,
+ "calling `Stdin::read_line`, then trying to parse it without first trimming"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for `<string_lit>.chars().any(|i| i == c)`.
+ ///
+ /// ### Why is this bad?
+ /// It's significantly slower than using a pattern instead, like
+ /// `matches!(c, '\\' | '.' | '+')`.
+ ///
+ /// Despite the pattern being faster, this is not a `perf` lint, as the `chars().any(..)` form is
+ /// pretty common and is a rather nice way to check whether a `char` is in a set. In any case,
+ /// this `restriction` lint is available for situations where that additional performance is
+ /// absolutely necessary.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let c = 'c';
+ /// "\\.+*?()|[]{}^$#&-~".chars().any(|x| x == c);
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # let c = 'c';
+ /// matches!(c, '\\' | '.' | '+' | '*' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~');
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub STRING_LIT_CHARS_ANY,
+ restriction,
+ "checks for `<string_lit>.chars().any(|i| i == c)`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `.map(|_| format!(..)).collect::<String>()`.
+ ///
+ /// ### Why is this bad?
+ /// This allocates a new string for every element in the iterator.
+ /// This can be done more efficiently by creating the `String` once and appending to it in `Iterator::fold`,
+ /// using either the `write!` macro which supports exactly the same syntax as the `format!` macro,
+ /// or concatenating with `+` in case the iterator yields `&str`/`String`.
+ ///
+ /// Note also that `write!`-ing into a `String` can never fail, despite the return type of `write!` being `std::fmt::Result`,
+ /// so it can be safely ignored or unwrapped.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn hex_encode(bytes: &[u8]) -> String {
+ /// bytes.iter().map(|b| format!("{b:02X}")).collect()
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// use std::fmt::Write;
+ /// fn hex_encode(bytes: &[u8]) -> String {
+ /// bytes.iter().fold(String::new(), |mut output, b| {
+ /// let _ = write!(output, "{b:02X}");
+ /// output
+ /// })
+ /// }
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub FORMAT_COLLECT,
+ perf,
+ "`format!`ing every element in a collection, then collecting the strings into a new `String`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `.skip(0)` on iterators.
+ ///
+ /// ### Why is this bad?
+ /// This was likely intended to be `.skip(1)` to skip the first element, as `.skip(0)` does
+ /// nothing. If not, the call should be removed.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let v = vec![1, 2, 3];
+ /// let x = v.iter().skip(0).collect::<Vec<_>>();
+ /// let y = v.iter().collect::<Vec<_>>();
+ /// assert_eq!(x, y);
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub ITER_SKIP_ZERO,
+ correctness,
+ "disallows `.skip(0)`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for usage of `bool::then` in `Iterator::filter_map`.
+ ///
+ /// ### Why is this bad?
+ /// This can be written with `filter` then `map` instead, which would reduce nesting and
+ /// separate the filtering from the transformation phase. This comes with no cost to
+ /// performance and is just cleaner.
+ ///
+ /// ### Limitations
+ /// Does not lint `bool::then_some`, as it evaluates its argument eagerly rather than lazily.
+ /// This can create differing behavior, so better safe than sorry.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # fn really_expensive_fn(i: i32) -> i32 { i }
+ /// # let v = vec![];
+ /// _ = v.into_iter().filter_map(|i| (i % 2 == 0).then(|| really_expensive_fn(i)));
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// # fn really_expensive_fn(i: i32) -> i32 { i }
+ /// # let v = vec![];
+ /// _ = v.into_iter().filter(|i| i % 2 == 0).map(|i| really_expensive_fn(i));
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub FILTER_MAP_BOOL_THEN,
+ style,
+ "checks for usage of `bool::then` in `Iterator::filter_map`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Looks for calls to `RwLock::write` where the lock is only used for reading.
+ ///
+ /// ### Why is this bad?
+ /// The write portion of `RwLock` is exclusive, meaning that no other thread
+ /// can access the lock while this writer is active.
+ ///
+ /// ### Example
+ /// ```rust
+ /// use std::sync::RwLock;
+ /// fn assert_is_zero(lock: &RwLock<i32>) {
+ /// let num = lock.write().unwrap();
+ /// assert_eq!(*num, 0);
+ /// }
+ /// ```
+ ///
+ /// Use instead:
+ /// ```rust
+ /// use std::sync::RwLock;
+ /// fn assert_is_zero(lock: &RwLock<i32>) {
+ /// let num = lock.read().unwrap();
+ /// assert_eq!(*num, 0);
+ /// }
+ /// ```
+ #[clippy::version = "1.73.0"]
+ pub READONLY_WRITE_LOCK,
+ nursery,
+ "acquiring a write lock when a read lock would work"
+}
+
pub struct Methods {
avoid_breaking_exported_api: bool,
msrv: Msrv,
@@ -3346,7 +3568,7 @@ impl_lint_pass!(Methods => [
SHOULD_IMPLEMENT_TRAIT,
WRONG_SELF_CONVENTION,
OK_EXPECT,
- UNWRAP_OR_ELSE_DEFAULT,
+ UNWRAP_OR_DEFAULT,
MAP_UNWRAP_OR,
RESULT_MAP_OR_INTO_OPTION,
OPTION_MAP_OR_NONE,
@@ -3389,6 +3611,7 @@ impl_lint_pass!(Methods => [
STRING_EXTEND_CHARS,
ITER_CLONED_COLLECT,
ITER_WITH_DRAIN,
+ TYPE_ID_ON_BOX,
USELESS_ASREF,
UNNECESSARY_FOLD,
UNNECESSARY_FILTER_MAP,
@@ -3435,6 +3658,7 @@ impl_lint_pass!(Methods => [
REPEAT_ONCE,
STABLE_SORT_PRIMITIVE,
UNIT_HASH,
+ READ_LINE_WITHOUT_TRIM,
UNNECESSARY_SORT_BY,
VEC_RESIZE_TO_ZERO,
VERBOSE_FILE_READS,
@@ -3448,6 +3672,11 @@ impl_lint_pass!(Methods => [
UNNECESSARY_LITERAL_UNWRAP,
DRAIN_COLLECT,
MANUAL_TRY_FOLD,
+ FORMAT_COLLECT,
+ STRING_LIT_CHARS_ANY,
+ ITER_SKIP_ZERO,
+ FILTER_MAP_BOOL_THEN,
+ READONLY_WRITE_LOCK
]);
/// Extracts a method call name, args, and `Span` of the method name.
@@ -3508,11 +3737,11 @@ impl<'tcx> LateLintPass<'tcx> for Methods {
let name = impl_item.ident.name.as_str();
let parent = cx.tcx.hir().get_parent_item(impl_item.hir_id()).def_id;
let item = cx.tcx.hir().expect_item(parent);
- let self_ty = cx.tcx.type_of(item.owner_id).subst_identity();
+ let self_ty = cx.tcx.type_of(item.owner_id).instantiate_identity();
let implements_trait = matches!(item.kind, hir::ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }));
if let hir::ImplItemKind::Fn(ref sig, id) = impl_item.kind {
- let method_sig = cx.tcx.fn_sig(impl_item.owner_id).subst_identity();
+ let method_sig = cx.tcx.fn_sig(impl_item.owner_id).instantiate_identity();
let method_sig = cx.tcx.erase_late_bound_regions(method_sig);
let first_arg_ty_opt = method_sig.inputs().iter().next().copied();
// if this impl block implements a trait, lint in trait definition instead
@@ -3602,8 +3831,7 @@ impl<'tcx> LateLintPass<'tcx> for Methods {
then {
let first_arg_span = first_arg_ty.span;
let first_arg_ty = hir_ty_to_ty(cx.tcx, first_arg_ty);
- let self_ty = TraitRef::identity(cx.tcx, item.owner_id.to_def_id())
- .self_ty();
+ let self_ty = TraitRef::identity(cx.tcx, item.owner_id.to_def_id()).self_ty();
wrong_self_convention::check(
cx,
item.ident.name.as_str(),
@@ -3620,8 +3848,7 @@ impl<'tcx> LateLintPass<'tcx> for Methods {
if item.ident.name == sym::new;
if let TraitItemKind::Fn(_, _) = item.kind;
let ret_ty = return_ty(cx, item.owner_id);
- let self_ty = TraitRef::identity(cx.tcx, item.owner_id.to_def_id())
- .self_ty();
+ let self_ty = TraitRef::identity(cx.tcx, item.owner_id.to_def_id()).self_ty();
if !ret_ty.contains(self_ty);
then {
@@ -3653,6 +3880,13 @@ impl Methods {
unnecessary_lazy_eval::check(cx, expr, recv, arg, "and");
}
},
+ ("any", [arg]) if let ExprKind::Closure(arg) = arg.kind
+ && let body = cx.tcx.hir().body(arg.body)
+ && let [param] = body.params
+ && let Some(("chars", recv, _, _, _)) = method_call(recv) =>
+ {
+ string_lit_chars_any::check(cx, expr, recv, param, peel_blocks(body.value), &self.msrv);
+ }
("arg", [arg]) => {
suspicious_command_arg_space::check(cx, recv, arg, span);
}
@@ -3669,8 +3903,9 @@ impl Methods {
Some((name @ ("cloned" | "copied"), recv2, [], _, _)) => {
iter_cloned_collect::check(cx, name, expr, recv2);
},
- Some(("map", m_recv, [m_arg], _, _)) => {
+ Some(("map", m_recv, [m_arg], m_ident_span, _)) => {
map_collect_result_unit::check(cx, expr, m_recv, m_arg);
+ format_collect::check(cx, expr, m_arg, m_ident_span);
},
Some(("take", take_self_arg, [take_arg], _, _)) => {
if self.msrv.meets(msrvs::STR_REPEAT) {
@@ -3712,13 +3947,27 @@ impl Methods {
match method_call(recv) {
Some(("ok", recv, [], _, _)) => ok_expect::check(cx, expr, recv),
Some(("err", recv, [], err_span, _)) => err_expect::check(cx, expr, recv, span, err_span, &self.msrv),
- _ => expect_used::check(cx, expr, recv, false, self.allow_expect_in_tests),
+ _ => unwrap_expect_used::check(
+ cx,
+ expr,
+ recv,
+ false,
+ self.allow_expect_in_tests,
+ unwrap_expect_used::Variant::Expect,
+ ),
}
unnecessary_literal_unwrap::check(cx, expr, recv, name, args);
},
("expect_err", [_]) => {
unnecessary_literal_unwrap::check(cx, expr, recv, name, args);
- expect_used::check(cx, expr, recv, true, self.allow_expect_in_tests);
+ unwrap_expect_used::check(
+ cx,
+ expr,
+ recv,
+ true,
+ self.allow_expect_in_tests,
+ unwrap_expect_used::Variant::Expect,
+ );
},
("extend", [arg]) => {
string_extend_chars::check(cx, expr, recv, arg);
@@ -3726,6 +3975,7 @@ impl Methods {
},
("filter_map", [arg]) => {
unnecessary_filter_map::check(cx, expr, arg, name);
+ filter_map_bool_then::check(cx, expr, arg, call_span);
filter_map_identity::check(cx, expr, arg, span);
},
("find_map", [arg]) => {
@@ -3769,11 +4019,9 @@ impl Methods {
unnecessary_join::check(cx, expr, recv, join_arg, span);
}
},
- ("last", []) | ("skip", [_]) => {
- if let Some((name2, recv2, args2, _span2, _)) = method_call(recv) {
- if let ("cloned", []) = (name2, args2) {
- iter_overeager_cloned::check(cx, expr, recv, recv2, false, false);
- }
+ ("last", []) => {
+ if let Some(("cloned", recv2, [], _span2, _)) = method_call(recv) {
+ iter_overeager_cloned::check(cx, expr, recv, recv2, false, false);
}
},
("lock", []) => {
@@ -3846,6 +4094,9 @@ impl Methods {
("read_to_string", [_]) => {
verbose_file_reads::check(cx, expr, recv, verbose_file_reads::READ_TO_STRING_MSG);
},
+ ("read_line", [arg]) => {
+ read_line_without_trim::check(cx, expr, recv, arg);
+ }
("repeat", [arg]) => {
repeat_once::check(cx, expr, recv, arg);
},
@@ -3871,6 +4122,13 @@ impl Methods {
seek_to_start_instead_of_rewind::check(cx, expr, recv, arg, span);
}
},
+ ("skip", [arg]) => {
+ iter_skip_zero::check(cx, expr, arg);
+
+ if let Some(("cloned", recv2, [], _span2, _)) = method_call(recv) {
+ iter_overeager_cloned::check(cx, expr, recv, recv2, false, false);
+ }
+ }
("sort", []) => {
stable_sort_primitive::check(cx, expr, recv);
},
@@ -3893,10 +4151,8 @@ impl Methods {
},
("step_by", [arg]) => iterator_step_by_zero::check(cx, expr, arg),
("take", [_arg]) => {
- if let Some((name2, recv2, args2, _span2, _)) = method_call(recv) {
- if let ("cloned", []) = (name2, args2) {
- iter_overeager_cloned::check(cx, expr, recv, recv2, false, false);
- }
+ if let Some(("cloned", recv2, [], _span2, _)) = method_call(recv) {
+ iter_overeager_cloned::check(cx, expr, recv, recv2, false, false);
}
},
("take", []) => needless_option_take::check(cx, expr, recv),
@@ -3914,6 +4170,9 @@ impl Methods {
("to_os_string" | "to_path_buf" | "to_vec", []) => {
implicit_clone::check(cx, name, expr, recv);
},
+ ("type_id", []) => {
+ type_id_on_box::check(cx, recv, expr.span);
+ }
("unwrap", []) => {
match method_call(recv) {
Some(("get", recv, [get_arg], _, _)) => {
@@ -3928,11 +4187,25 @@ impl Methods {
_ => {},
}
unnecessary_literal_unwrap::check(cx, expr, recv, name, args);
- unwrap_used::check(cx, expr, recv, false, self.allow_unwrap_in_tests);
+ unwrap_expect_used::check(
+ cx,
+ expr,
+ recv,
+ false,
+ self.allow_unwrap_in_tests,
+ unwrap_expect_used::Variant::Unwrap,
+ );
},
("unwrap_err", []) => {
unnecessary_literal_unwrap::check(cx, expr, recv, name, args);
- unwrap_used::check(cx, expr, recv, true, self.allow_unwrap_in_tests);
+ unwrap_expect_used::check(
+ cx,
+ expr,
+ recv,
+ true,
+ self.allow_unwrap_in_tests,
+ unwrap_expect_used::Variant::Unwrap,
+ );
},
("unwrap_or", [u_arg]) => {
match method_call(recv) {
@@ -3949,7 +4222,7 @@ impl Methods {
}
unnecessary_literal_unwrap::check(cx, expr, recv, name, args);
},
- ("unwrap_or_default", []) => {
+ ("unwrap_or_default" | "unwrap_unchecked" | "unwrap_err_unchecked", []) => {
unnecessary_literal_unwrap::check(cx, expr, recv, name, args);
}
("unwrap_or_else", [u_arg]) => {
@@ -3957,12 +4230,14 @@ impl Methods {
Some(("map", recv, [map_arg], _, _))
if map_unwrap_or::check(cx, expr, recv, map_arg, u_arg, &self.msrv) => {},
_ => {
- unwrap_or_else_default::check(cx, expr, recv, u_arg);
unnecessary_lazy_eval::check(cx, expr, recv, u_arg, "unwrap_or");
},
}
unnecessary_literal_unwrap::check(cx, expr, recv, name, args);
},
+ ("write", []) => {
+ readonly_write_lock::check(cx, expr, recv);
+ }
("zip", [arg]) => {
if let ExprKind::MethodCall(name, iter_recv, [], _) = recv.kind
&& name.ident.name == sym::iter
@@ -4113,8 +4388,8 @@ impl SelfKind {
} else if ty.is_box() {
ty.boxed_ty() == parent_ty
} else if is_type_diagnostic_item(cx, ty, sym::Rc) || is_type_diagnostic_item(cx, ty, sym::Arc) {
- if let ty::Adt(_, substs) = ty.kind() {
- substs.types().next().map_or(false, |t| t == parent_ty)
+ if let ty::Adt(_, args) = ty.kind() {
+ args.types().next().map_or(false, |t| t == parent_ty)
} else {
false
}
@@ -4134,7 +4409,7 @@ impl SelfKind {
};
let Some(trait_def_id) = cx.tcx.get_diagnostic_item(trait_sym) else {
- return false
+ return false;
};
implements_trait(cx, ty, trait_def_id, &[parent_ty.into()])
}
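
[Reviewer note, not part of the patch] The dispatcher changes above wire `("skip", [arg])` into the new `iter_skip_zero` check. A minimal sketch of the call shape that check is presumably aimed at (assumption: it flags a literal `0` passed to `skip`):

```rust
fn skip_zero_example() {
    // `.skip(0)` does nothing; presumably flagged by the `iter_skip_zero` check wired up above.
    let all: Vec<_> = [1, 2, 3].iter().skip(0).collect();
    println!("{all:?}");
}
```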
diff --git a/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs b/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs
index d0aa39d06..2855e23bf 100644
--- a/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs
@@ -1,5 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
-use clippy_utils::{expr_custom_deref_adjustment, ty::is_type_diagnostic_item};
+use clippy_utils::expr_custom_deref_adjustment;
+use clippy_utils::ty::is_type_diagnostic_item;
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, Mutability};
@@ -15,7 +16,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, ex: &'tcx Expr<'tcx>, recv: &'
if let ty::Ref(_, _, Mutability::Mut) = cx.typeck_results().expr_ty(recv).kind();
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(ex.hir_id);
if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- if is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).subst_identity(), sym::Mutex);
+ if is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Mutex);
then {
span_lint_and_sugg(
cx,
diff --git a/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs b/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs
index 8ca7af810..dbd965d65 100644
--- a/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs
@@ -4,10 +4,9 @@ use clippy_utils::source::{snippet, snippet_with_applicability};
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::{is_type_diagnostic_item, make_normalized_projection, make_projection};
use clippy_utils::{
- can_move_expr_to_closure, get_enclosing_block, get_parent_node, is_trait_method, path_to_local, path_to_local_id,
- CaptureKind,
+ can_move_expr_to_closure, fn_def_id, get_enclosing_block, get_parent_node, higher, is_trait_method, path_to_local,
+ path_to_local_id, CaptureKind,
};
-use clippy_utils::{fn_def_id, higher};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{Applicability, MultiSpan};
use rustc_hir::intravisit::{walk_block, walk_expr, Visitor};
@@ -163,7 +162,7 @@ fn check_collect_into_intoiterator<'tcx>(
// that contains `collect_expr`
let inputs = cx
.tcx
- .liberate_late_bound_regions(id, cx.tcx.fn_sig(id).subst_identity())
+ .liberate_late_bound_regions(id, cx.tcx.fn_sig(id).instantiate_identity())
.inputs();
// map IntoIterator generic bounds to their signature
@@ -201,7 +200,7 @@ fn check_collect_into_intoiterator<'tcx>(
/// Checks if the given method call matches the expected signature of `([&[mut]] self) -> bool`
fn is_is_empty_sig(cx: &LateContext<'_>, call_id: HirId) -> bool {
cx.typeck_results().type_dependent_def_id(call_id).map_or(false, |id| {
- let sig = cx.tcx.fn_sig(id).subst_identity().skip_binder();
+ let sig = cx.tcx.fn_sig(id).instantiate_identity().skip_binder();
sig.inputs().len() == 1 && sig.output().is_bool()
})
}
@@ -215,7 +214,7 @@ fn iterates_same_ty<'tcx>(cx: &LateContext<'tcx>, iter_ty: Ty<'tcx>, collect_ty:
&& let Some(into_iter_item_proj) = make_projection(cx.tcx, into_iter_trait, item, [collect_ty])
&& let Ok(into_iter_item_ty) = cx.tcx.try_normalize_erasing_regions(
cx.param_env,
- Ty::new_projection(cx.tcx,into_iter_item_proj.def_id, into_iter_item_proj.substs)
+ Ty::new_projection(cx.tcx,into_iter_item_proj.def_id, into_iter_item_proj.args)
)
{
iter_item_ty == into_iter_item_ty
@@ -229,7 +228,7 @@ fn iterates_same_ty<'tcx>(cx: &LateContext<'tcx>, iter_ty: Ty<'tcx>, collect_ty:
fn is_contains_sig(cx: &LateContext<'_>, call_id: HirId, iter_expr: &Expr<'_>) -> bool {
let typeck = cx.typeck_results();
if let Some(id) = typeck.type_dependent_def_id(call_id)
- && let sig = cx.tcx.fn_sig(id).subst_identity()
+ && let sig = cx.tcx.fn_sig(id).instantiate_identity()
&& sig.skip_binder().output().is_bool()
&& let [_, search_ty] = *sig.skip_binder().inputs()
&& let ty::Ref(_, search_ty, Mutability::Not) = *cx.tcx.erase_late_bound_regions(sig.rebind(search_ty)).kind()
@@ -237,11 +236,11 @@ fn is_contains_sig(cx: &LateContext<'_>, call_id: HirId, iter_expr: &Expr<'_>) -
&& let Some(iter_item) = cx.tcx
.associated_items(iter_trait)
.find_by_name_and_kind(cx.tcx, Ident::with_dummy_span(Symbol::intern("Item")), AssocKind::Type, iter_trait)
- && let substs = cx.tcx.mk_substs(&[GenericArg::from(typeck.expr_ty_adjusted(iter_expr))])
- && let proj_ty = Ty::new_projection(cx.tcx,iter_item.def_id, substs)
+ && let args = cx.tcx.mk_args(&[GenericArg::from(typeck.expr_ty_adjusted(iter_expr))])
+ && let proj_ty = Ty::new_projection(cx.tcx,iter_item.def_id, args)
&& let Ok(item_ty) = cx.tcx.try_normalize_erasing_regions(cx.param_env, proj_ty)
{
- item_ty == EarlyBinder::bind(search_ty).subst(cx.tcx, cx.typeck_results().node_substs(call_id))
+ item_ty == EarlyBinder::bind(search_ty).instantiate(cx.tcx, cx.typeck_results().node_args(call_id))
} else {
false
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/needless_option_as_deref.rs b/src/tools/clippy/clippy_lints/src/methods/needless_option_as_deref.rs
index 7030baf19..eaae8613d 100644
--- a/src/tools/clippy/clippy_lints/src/methods/needless_option_as_deref.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/needless_option_as_deref.rs
@@ -17,7 +17,9 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, name
if is_type_diagnostic_item(cx, outer_ty, sym::Option) && outer_ty == typeck.expr_ty(recv) {
if name == "as_deref_mut" && recv.is_syntactic_place_expr() {
- let Res::Local(binding_id) = path_res(cx, recv) else { return };
+ let Res::Local(binding_id) = path_res(cx, recv) else {
+ return;
+ };
if local_used_after_expr(cx, binding_id, recv) {
return;
@@ -29,7 +31,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, name
NEEDLESS_OPTION_AS_DEREF,
expr.span,
"derefed type is same as origin",
- "try this",
+ "try",
snippet_opt(cx, recv.span).unwrap(),
Applicability::MachineApplicable,
);
diff --git a/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs b/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs
index eada530d6..697eab32a 100644
--- a/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs
@@ -1,5 +1,6 @@
use super::OBFUSCATED_IF_ELSE;
-use clippy_utils::{diagnostics::span_lint_and_sugg, source::snippet_with_applicability};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
diff --git a/src/tools/clippy/clippy_lints/src/methods/ok_expect.rs b/src/tools/clippy/clippy_lints/src/methods/ok_expect.rs
index 646fc4a7b..f2ef42933 100644
--- a/src/tools/clippy/clippy_lints/src/methods/ok_expect.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/ok_expect.rs
@@ -33,7 +33,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr
/// Given a `Result<T, E>` type, return its error type (`E`).
fn get_error_type<'a>(cx: &LateContext<'_>, ty: Ty<'a>) -> Option<Ty<'a>> {
match ty.kind() {
- ty::Adt(_, substs) if is_type_diagnostic_item(cx, ty, sym::Result) => substs.types().nth(1),
+ ty::Adt(_, args) if is_type_diagnostic_item(cx, ty, sym::Result) => args.types().nth(1),
_ => None,
}
}
diff --git a/src/tools/clippy/clippy_lints/src/methods/open_options.rs b/src/tools/clippy/clippy_lints/src/methods/open_options.rs
index bd625a691..1c664e76d 100644
--- a/src/tools/clippy/clippy_lints/src/methods/open_options.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/open_options.rs
@@ -11,7 +11,7 @@ use super::NONSENSICAL_OPEN_OPTIONS;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, recv: &'tcx Expr<'_>) {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
- && match_type(cx, cx.tcx.type_of(impl_id).subst_identity(), &paths::OPEN_OPTIONS)
+ && match_type(cx, cx.tcx.type_of(impl_id).instantiate_identity(), &paths::OPEN_OPTIONS)
{
let mut options = Vec::new();
get_open_options(cx, recv, &mut options);
diff --git a/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs b/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs
index 41ceef19e..cb6a23068 100644
--- a/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs
@@ -8,8 +8,7 @@ use rustc_hir::LangItem::{OptionNone, OptionSome};
use rustc_lint::LateContext;
use rustc_span::symbol::sym;
-use super::OPTION_MAP_OR_NONE;
-use super::RESULT_MAP_OR_INTO_OPTION;
+use super::{OPTION_MAP_OR_NONE, RESULT_MAP_OR_INTO_OPTION};
// The expression inside a closure may or may not have surrounding braces
// which causes problems when generating a suggestion.
diff --git a/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs b/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs
index f4f158c04..fcbe005fb 100644
--- a/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs
@@ -1,17 +1,12 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::msrvs::{self, Msrv};
use clippy_utils::source::snippet_with_applicability;
-use clippy_utils::ty::is_copy;
-use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::ty::{is_copy, is_type_diagnostic_item};
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
use rustc_hir::intravisit::{walk_path, Visitor};
-use rustc_hir::ExprKind;
-use rustc_hir::Node;
-use rustc_hir::PatKind;
-use rustc_hir::QPath;
-use rustc_hir::{self, HirId, Path};
+use rustc_hir::{self, ExprKind, HirId, Node, PatKind, Path, QPath};
use rustc_lint::LateContext;
use rustc_middle::hir::nested_filter;
use rustc_span::source_map::Span;
diff --git a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs
index 7ce28ea93..8b2f57160 100644
--- a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs
@@ -1,16 +1,17 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::eager_or_lazy::switch_to_lazy_eval;
use clippy_utils::source::snippet_with_context;
-use clippy_utils::ty::{implements_trait, is_type_diagnostic_item};
-use clippy_utils::{contains_return, is_trait_item, last_path_segment};
+use clippy_utils::ty::{expr_type_is_certain, implements_trait, is_type_diagnostic_item};
+use clippy_utils::{contains_return, is_default_equivalent, is_default_equivalent_call, last_path_segment};
use if_chain::if_chain;
use rustc_errors::Applicability;
-use rustc_hir as hir;
use rustc_lint::LateContext;
+use rustc_middle::ty;
use rustc_span::source_map::Span;
-use rustc_span::symbol::{kw, sym, Symbol};
+use rustc_span::symbol::{self, sym, Symbol};
+use {rustc_ast as ast, rustc_hir as hir};
-use super::OR_FUN_CALL;
+use super::{OR_FUN_CALL, UNWRAP_OR_DEFAULT};
/// Checks for the `OR_FUN_CALL` lint.
#[allow(clippy::too_many_lines)]
@@ -24,53 +25,72 @@ pub(super) fn check<'tcx>(
) {
/// Checks for `unwrap_or(T::new())`, `unwrap_or(T::default())`,
/// `or_insert(T::new())` or `or_insert(T::default())`.
+ /// Similarly checks for `unwrap_or_else(T::new)`, `unwrap_or_else(T::default)`,
+ /// `or_insert_with(T::new)` or `or_insert_with(T::default)`.
#[allow(clippy::too_many_arguments)]
fn check_unwrap_or_default(
cx: &LateContext<'_>,
name: &str,
+ receiver: &hir::Expr<'_>,
fun: &hir::Expr<'_>,
- arg: &hir::Expr<'_>,
- or_has_args: bool,
+ call_expr: Option<&hir::Expr<'_>>,
span: Span,
method_span: Span,
) -> bool {
- let is_default_default = || is_trait_item(cx, fun, sym::Default);
+ if !expr_type_is_certain(cx, receiver) {
+ return false;
+ }
- let implements_default = |arg, default_trait_id| {
- let arg_ty = cx.typeck_results().expr_ty(arg);
- implements_trait(cx, arg_ty, default_trait_id, &[])
+ let is_new = |fun: &hir::Expr<'_>| {
+ if let hir::ExprKind::Path(ref qpath) = fun.kind {
+ let path = last_path_segment(qpath).ident.name;
+ matches!(path, sym::new)
+ } else {
+ false
+ }
};
- if_chain! {
- if !or_has_args;
- if let Some(sugg) = match name {
- "unwrap_or" => Some("unwrap_or_default"),
- "or_insert" => Some("or_default"),
- _ => None,
- };
- if let hir::ExprKind::Path(ref qpath) = fun.kind;
- if let Some(default_trait_id) = cx.tcx.get_diagnostic_item(sym::Default);
- let path = last_path_segment(qpath).ident.name;
- // needs to target Default::default in particular or be *::new and have a Default impl
- // available
- if (matches!(path, kw::Default) && is_default_default())
- || (matches!(path, sym::new) && implements_default(arg, default_trait_id));
-
- then {
- span_lint_and_sugg(
- cx,
- OR_FUN_CALL,
- method_span.with_hi(span.hi()),
- &format!("use of `{name}` followed by a call to `{path}`"),
- "try this",
- format!("{sugg}()"),
- Applicability::MachineApplicable,
- );
-
- true
+ let output_type_implements_default = |fun| {
+ let fun_ty = cx.typeck_results().expr_ty(fun);
+ if let ty::FnDef(def_id, args) = fun_ty.kind() {
+ let output_ty = cx.tcx.fn_sig(def_id).instantiate(cx.tcx, args).skip_binder().output();
+ cx.tcx
+ .get_diagnostic_item(sym::Default)
+ .map_or(false, |default_trait_id| {
+ implements_trait(cx, output_ty, default_trait_id, &[])
+ })
} else {
false
}
+ };
+
+ let sugg = match (name, call_expr.is_some()) {
+ ("unwrap_or", true) | ("unwrap_or_else", false) => "unwrap_or_default",
+ ("or_insert", true) | ("or_insert_with", false) => "or_default",
+ _ => return false,
+ };
+
+ // needs to target Default::default in particular or be *::new and have a Default impl
+ // available
+ if (is_new(fun) && output_type_implements_default(fun))
+ || match call_expr {
+ Some(call_expr) => is_default_equivalent(cx, call_expr),
+ None => is_default_equivalent_call(cx, fun) || closure_body_returns_empty_to_string(cx, fun),
+ }
+ {
+ span_lint_and_sugg(
+ cx,
+ UNWRAP_OR_DEFAULT,
+ method_span.with_hi(span.hi()),
+ &format!("use of `{name}` to construct default value"),
+ "try",
+ format!("{sugg}()"),
+ Applicability::MachineApplicable,
+ );
+
+ true
+ } else {
+ false
}
}
@@ -139,7 +159,7 @@ pub(super) fn check<'tcx>(
OR_FUN_CALL,
span_replace_word,
&format!("use of `{name}` followed by a function call"),
- "try this",
+ "try",
format!("{name}_{suffix}({sugg})"),
app,
);
@@ -168,11 +188,16 @@ pub(super) fn check<'tcx>(
match inner_arg.kind {
hir::ExprKind::Call(fun, or_args) => {
let or_has_args = !or_args.is_empty();
- if !check_unwrap_or_default(cx, name, fun, arg, or_has_args, expr.span, method_span) {
+ if or_has_args
+ || !check_unwrap_or_default(cx, name, receiver, fun, Some(inner_arg), expr.span, method_span)
+ {
let fun_span = if or_has_args { None } else { Some(fun.span) };
check_general_case(cx, name, method_span, receiver, arg, None, expr.span, fun_span);
}
},
+ hir::ExprKind::Path(..) | hir::ExprKind::Closure(..) => {
+ check_unwrap_or_default(cx, name, receiver, inner_arg, None, expr.span, method_span);
+ },
hir::ExprKind::Index(..) | hir::ExprKind::MethodCall(..) => {
check_general_case(cx, name, method_span, receiver, arg, None, expr.span, None);
},
@@ -189,3 +214,22 @@ pub(super) fn check<'tcx>(
}
}
}
+
+fn closure_body_returns_empty_to_string(cx: &LateContext<'_>, e: &hir::Expr<'_>) -> bool {
+ if let hir::ExprKind::Closure(&hir::Closure { body, .. }) = e.kind {
+ let body = cx.tcx.hir().body(body);
+
+ if body.params.is_empty()
+ && let hir::Expr{ kind, .. } = &body.value
+ && let hir::ExprKind::MethodCall(hir::PathSegment {ident, ..}, self_arg, _, _) = kind
+ && ident.name == sym::to_string
+ && let hir::Expr{ kind, .. } = self_arg
+ && let hir::ExprKind::Lit(lit) = kind
+ && let ast::LitKind::Str(symbol::kw::Empty, _) = lit.node
+ {
+ return true;
+ }
+ }
+
+ false
+}
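
[Reviewer note, not part of the patch] With `check_unwrap_or_default` reworked above, default-constructing arguments are reported under `UNWRAP_OR_DEFAULT` (message "use of `{name}` to construct default value") instead of `OR_FUN_CALL`, and bare path/closure arguments to `unwrap_or_else`/`or_insert_with` are now handled as well. A minimal sketch of the shapes it targets:

```rust
fn unwrap_or_default_example(opt: Option<String>) -> String {
    // Both arguments only construct the default value, so the suggestion
    // in each case is `unwrap_or_default()`.
    let a = opt.clone().unwrap_or(String::new()); // `Call` arm
    let b = opt.unwrap_or_else(String::new);      // `Path` arm, newly handled above
    a + &b
}
```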
diff --git a/src/tools/clippy/clippy_lints/src/methods/or_then_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/or_then_unwrap.rs
index 55ba6e262..7b0bdcf99 100644
--- a/src/tools/clippy/clippy_lints/src/methods/or_then_unwrap.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/or_then_unwrap.rs
@@ -1,8 +1,10 @@
+use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::is_type_diagnostic_item;
-use clippy_utils::{diagnostics::span_lint_and_sugg, is_res_lang_ctor, path_res};
+use clippy_utils::{is_res_lang_ctor, path_res};
use rustc_errors::Applicability;
-use rustc_hir::{lang_items::LangItem, Expr, ExprKind};
+use rustc_hir::lang_items::LangItem;
+use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_span::{sym, Span};
@@ -50,7 +52,7 @@ pub(super) fn check<'tcx>(
OR_THEN_UNWRAP,
unwrap_expr.span.with_lo(or_span.lo()),
title,
- "try this",
+ "try",
suggestion,
applicability,
);
diff --git a/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs b/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs
index 0284d9dea..1c07d2a3a 100644
--- a/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs
@@ -14,7 +14,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, arg: &'t
if_chain! {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- if is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).subst_identity(), sym::PathBuf);
+ if is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::PathBuf);
if let ExprKind::Lit(lit) = arg.kind;
if let LitKind::Str(ref path_lit, _) = lit.node;
if let pushed_path = Path::new(path_lit.as_str());
diff --git a/src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs b/src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs
index 867a3b402..f253d8de9 100644
--- a/src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint;
use clippy_utils::source::snippet;
-use clippy_utils::{higher, SpanlessEq};
-use clippy_utils::{is_integer_const, is_trait_method};
+use clippy_utils::{higher, is_integer_const, is_trait_method, SpanlessEq};
use if_chain::if_chain;
use rustc_hir::{Expr, ExprKind, QPath};
use rustc_lint::LateContext;
diff --git a/src/tools/clippy/clippy_lints/src/methods/read_line_without_trim.rs b/src/tools/clippy/clippy_lints/src/methods/read_line_without_trim.rs
new file mode 100644
index 000000000..81f9e2a77
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/read_line_without_trim.rs
@@ -0,0 +1,74 @@
+use std::ops::ControlFlow;
+
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::visitors::for_each_local_use_after_expr;
+use clippy_utils::{get_parent_expr, match_def_path};
+use rustc_errors::Applicability;
+use rustc_hir::def::Res;
+use rustc_hir::{Expr, ExprKind, QPath};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, Ty};
+use rustc_span::sym;
+
+use super::READ_LINE_WITHOUT_TRIM;
+
+/// Will a `.parse::<ty>()` call fail if the input has a trailing newline?
+fn parse_fails_on_trailing_newline(ty: Ty<'_>) -> bool {
+ // only allow a very limited set of types for now, for which we 100% know parsing will fail
+ matches!(ty.kind(), ty::Float(_) | ty::Bool | ty::Int(_) | ty::Uint(_))
+}
+
+pub fn check(cx: &LateContext<'_>, call: &Expr<'_>, recv: &Expr<'_>, arg: &Expr<'_>) {
+ if let Some(recv_adt) = cx.typeck_results().expr_ty(recv).ty_adt_def()
+ && match_def_path(cx, recv_adt.did(), &["std", "io", "stdio", "Stdin"])
+ && let ExprKind::Path(QPath::Resolved(_, path)) = arg.peel_borrows().kind
+ && let Res::Local(local_id) = path.res
+ {
+ // We've checked that `call` is a call to `Stdin::read_line()` with the right receiver,
+ // now let's check if the first use of the string passed to `::read_line()` is
+ // parsed into a type that will always fail if it has a trailing newline.
+ for_each_local_use_after_expr(cx, local_id, call.hir_id, |expr| {
+ if let Some(parent) = get_parent_expr(cx, expr)
+ && let ExprKind::MethodCall(segment, .., span) = parent.kind
+ && segment.ident.name == sym!(parse)
+ && let parse_result_ty = cx.typeck_results().expr_ty(parent)
+ && is_type_diagnostic_item(cx, parse_result_ty, sym::Result)
+ && let ty::Adt(_, args) = parse_result_ty.kind()
+ && let Some(ok_ty) = args[0].as_type()
+ && parse_fails_on_trailing_newline(ok_ty)
+ {
+ let local_snippet = snippet(cx, expr.span, "<expr>");
+ span_lint_and_then(
+ cx,
+ READ_LINE_WITHOUT_TRIM,
+ span,
+ "calling `.parse()` without trimming the trailing newline character",
+ |diag| {
+ diag.span_note(call.span, "call to `.read_line()` here, \
+ which leaves a trailing newline character in the buffer, \
+ which in turn will cause `.parse()` to fail");
+
+ diag.span_suggestion(
+ expr.span,
+ "try",
+ format!("{local_snippet}.trim_end()"),
+ Applicability::MachineApplicable,
+ );
+ }
+ );
+ }
+
+ // only consider the first use to prevent this scenario:
+ // ```
+ // let mut s = String::new();
+ // std::io::stdin().read_line(&mut s);
+ // s.pop();
+ // let _x: i32 = s.parse().unwrap();
+ // ```
+ // this is actually fine, because the pop call removes the trailing newline.
+ ControlFlow::<(), ()>::Break(())
+ });
+ }
+}
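
[Reviewer note, not part of the patch] A sketch of the scenario the new lint's own comments describe: the trailing newline left by `read_line` makes the first `parse` of the buffer fail.

```rust
fn read_line_example() {
    let mut input = String::new();
    std::io::stdin().read_line(&mut input).unwrap();
    // Flagged: `input` still ends with '\n', so parsing to an integer always fails.
    // Suggested fix: `input.trim_end().parse::<i32>()`.
    let n: i32 = input.parse().unwrap();
    println!("{n}");
}
```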
diff --git a/src/tools/clippy/clippy_lints/src/methods/readonly_write_lock.rs b/src/tools/clippy/clippy_lints/src/methods/readonly_write_lock.rs
new file mode 100644
index 000000000..e3ec921da
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/readonly_write_lock.rs
@@ -0,0 +1,52 @@
+use super::READONLY_WRITE_LOCK;
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::mir::{enclosing_mir, visit_local_usage};
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_type_diagnostic_item;
+use rustc_errors::Applicability;
+use rustc_hir::{Expr, ExprKind, Node};
+use rustc_lint::LateContext;
+use rustc_middle::mir::{Location, START_BLOCK};
+use rustc_span::sym;
+
+fn is_unwrap_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ if let ExprKind::MethodCall(path, receiver, ..) = expr.kind
+ && path.ident.name == sym::unwrap
+ {
+ is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(receiver).peel_refs(), sym::Result)
+ } else {
+ false
+ }
+}
+
+pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, receiver: &Expr<'_>) {
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(receiver).peel_refs(), sym::RwLock)
+ && let Node::Expr(unwrap_call_expr) = cx.tcx.hir().get_parent(expr.hir_id)
+ && is_unwrap_call(cx, unwrap_call_expr)
+ && let parent = cx.tcx.hir().get_parent(unwrap_call_expr.hir_id)
+ && let Node::Local(local) = parent
+ && let Some(mir) = enclosing_mir(cx.tcx, expr.hir_id)
+ && let Some((local, _)) = mir.local_decls.iter_enumerated().find(|(_, decl)| {
+ local.span.contains(decl.source_info.span)
+ })
+ && let Some(usages) = visit_local_usage(&[local], mir, Location {
+ block: START_BLOCK,
+ statement_index: 0,
+ })
+ && let [usage] = usages.as_slice()
+ {
+ let writer_never_mutated = usage.local_consume_or_mutate_locs.is_empty();
+
+ if writer_never_mutated {
+ span_lint_and_sugg(
+ cx,
+ READONLY_WRITE_LOCK,
+ expr.span,
+ "this write lock is used only for reading",
+ "consider using a read lock instead",
+ format!("{}.read()", snippet(cx, receiver.span, "<receiver>")),
+ Applicability::MaybeIncorrect // write lock might be intentional for enforcing exclusiveness
+ );
+ }
+ }
+}
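
[Reviewer note, not part of the patch] The shape the new `readonly_write_lock` check looks for: a write guard bound to a local that is never mutated.

```rust
use std::sync::RwLock;

fn write_lock_example(lock: &RwLock<Vec<u32>>) -> usize {
    // Flagged (MaybeIncorrect): the guard is only read, so `lock.read()` would do;
    // the write lock might still be intentional for exclusiveness.
    let guard = lock.write().unwrap();
    guard.len()
}
```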
diff --git a/src/tools/clippy/clippy_lints/src/methods/seek_from_current.rs b/src/tools/clippy/clippy_lints/src/methods/seek_from_current.rs
index c028e9543..f3d6a15ed 100644
--- a/src/tools/clippy/clippy_lints/src/methods/seek_from_current.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/seek_from_current.rs
@@ -3,10 +3,10 @@ use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
-use clippy_utils::{
- diagnostics::span_lint_and_sugg, get_trait_def_id, match_def_path, paths, source::snippet_with_applicability,
- ty::implements_trait,
-};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_with_applicability;
+use clippy_utils::ty::implements_trait;
+use clippy_utils::{get_trait_def_id, match_def_path, paths};
use super::SEEK_FROM_CURRENT;
diff --git a/src/tools/clippy/clippy_lints/src/methods/stable_sort_primitive.rs b/src/tools/clippy/clippy_lints/src/methods/stable_sort_primitive.rs
index b5fd0ad8c..0f4c97022 100644
--- a/src/tools/clippy/clippy_lints/src/methods/stable_sort_primitive.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/stable_sort_primitive.rs
@@ -10,7 +10,7 @@ use super::STABLE_SORT_PRIMITIVE;
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, recv: &'tcx Expr<'_>) {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
&& let Some(impl_id) = cx.tcx.impl_of_method(method_id)
- && cx.tcx.type_of(impl_id).subst_identity().is_slice()
+ && cx.tcx.type_of(impl_id).instantiate_identity().is_slice()
&& let Some(slice_type) = is_slice_of_primitives(cx, recv)
{
span_lint_and_then(
diff --git a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs
index 88a3c2620..7016ad0a8 100644
--- a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs
@@ -55,7 +55,7 @@ fn lint_needless(cx: &LateContext<'_>, method_name: &str, expr: &Expr<'_>, self_
NEEDLESS_SPLITN,
expr.span,
&format!("unnecessary use of `{r}splitn`"),
- "try this",
+ "try",
format!(
"{}.{r}split({})",
snippet_with_context(cx, self_arg.span, expr.span.ctxt(), "..", &mut app).0,
@@ -110,7 +110,7 @@ fn check_manual_split_once(
IterUsageKind::Nth(_) => return,
};
- span_lint_and_sugg(cx, MANUAL_SPLIT_ONCE, usage.span, msg, "try this", sugg, app);
+ span_lint_and_sugg(cx, MANUAL_SPLIT_ONCE, usage.span, msg, "try", sugg, app);
}
/// checks for
@@ -236,7 +236,7 @@ fn indirect_usage<'tcx>(
!matches!(
node,
Node::Expr(Expr {
- kind: ExprKind::Match(.., MatchSource::TryDesugar),
+ kind: ExprKind::Match(.., MatchSource::TryDesugar(_)),
..
})
)
diff --git a/src/tools/clippy/clippy_lints/src/methods/string_extend_chars.rs b/src/tools/clippy/clippy_lints/src/methods/string_extend_chars.rs
index 2c20c6d75..c7885f689 100644
--- a/src/tools/clippy/clippy_lints/src/methods/string_extend_chars.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/string_extend_chars.rs
@@ -34,7 +34,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr
STRING_EXTEND_CHARS,
expr.span,
"calling `.extend(_.chars())`",
- "try this",
+ "try",
format!(
"{}.push_str({ref_str}{})",
snippet_with_applicability(cx, recv.span, "..", &mut applicability),
diff --git a/src/tools/clippy/clippy_lints/src/methods/string_lit_chars_any.rs b/src/tools/clippy/clippy_lints/src/methods/string_lit_chars_any.rs
new file mode 100644
index 000000000..70da6ad58
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/string_lit_chars_any.rs
@@ -0,0 +1,58 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::msrvs::{Msrv, MATCHES_MACRO};
+use clippy_utils::source::snippet_opt;
+use clippy_utils::{is_from_proc_macro, is_trait_method, path_to_local};
+use itertools::Itertools;
+use rustc_ast::LitKind;
+use rustc_errors::Applicability;
+use rustc_hir::{BinOpKind, Expr, ExprKind, Param, PatKind};
+use rustc_lint::LateContext;
+use rustc_span::sym;
+
+use super::STRING_LIT_CHARS_ANY;
+
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ expr: &'tcx Expr<'tcx>,
+ recv: &Expr<'_>,
+ param: &'tcx Param<'tcx>,
+ body: &Expr<'_>,
+ msrv: &Msrv,
+) {
+ if msrv.meets(MATCHES_MACRO)
+ && is_trait_method(cx, expr, sym::Iterator)
+ && let PatKind::Binding(_, arg, _, _) = param.pat.kind
+ && let ExprKind::Lit(lit_kind) = recv.kind
+ && let LitKind::Str(val, _) = lit_kind.node
+ && let ExprKind::Binary(kind, lhs, rhs) = body.kind
+ && let BinOpKind::Eq = kind.node
+ && let Some(lhs_path) = path_to_local(lhs)
+ && let Some(rhs_path) = path_to_local(rhs)
+ && let scrutinee = match (lhs_path == arg, rhs_path == arg) {
+ (true, false) => rhs,
+ (false, true) => lhs,
+ _ => return,
+ }
+ && !is_from_proc_macro(cx, expr)
+ && let Some(scrutinee_snip) = snippet_opt(cx, scrutinee.span)
+ {
+ // Normalize the char using `map` so `join` doesn't use `Display`, if we don't then
+ // something like `r"\"` will become `'\'`, which is of course invalid
+ let pat_snip = val.as_str().chars().map(|c| format!("{c:?}")).join(" | ");
+
+ span_lint_and_then(
+ cx,
+ STRING_LIT_CHARS_ANY,
+ expr.span,
+ "usage of `.chars().any(...)` to check if a char matches any from a string literal",
+ |diag| {
+ diag.span_suggestion_verbose(
+ expr.span,
+ "use `matches!(...)` instead",
+ format!("matches!({scrutinee_snip}, {pat_snip})"),
+ Applicability::MachineApplicable,
+ );
+ }
+ );
+ }
+}
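
[Reviewer note, not part of the patch] The pattern the new lint rewrites into `matches!` (gated on the `matches!` MSRV), per the check above:

```rust
fn chars_any_example(c: char) -> bool {
    // Flagged: comparing a char against every char of a string literal.
    // Suggested: `matches!(c, 'a' | 'b' | 'c')`.
    "abc".chars().any(|x| x == c)
}
```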
diff --git a/src/tools/clippy/clippy_lints/src/methods/suspicious_command_arg_space.rs b/src/tools/clippy/clippy_lints/src/methods/suspicious_command_arg_space.rs
index 73632c5a3..bc8f01767 100644
--- a/src/tools/clippy/clippy_lints/src/methods/suspicious_command_arg_space.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/suspicious_command_arg_space.rs
@@ -1,11 +1,10 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::paths;
use clippy_utils::ty::match_type;
-use rustc_ast as ast;
use rustc_errors::{Applicability, Diagnostic};
-use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_span::Span;
+use {rustc_ast as ast, rustc_hir as hir};
use super::SUSPICIOUS_COMMAND_ARG_SPACE;
diff --git a/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs b/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs
index 90ca66bd7..3cb2719e4 100644
--- a/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs
@@ -13,7 +13,7 @@ pub(super) fn check(cx: &LateContext<'_>, method_name: &str, expr: &Expr<'_>, se
if let Some(call_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
if let Some(impl_id) = cx.tcx.impl_of_method(call_id);
if cx.tcx.impl_trait_ref(impl_id).is_none();
- let self_ty = cx.tcx.type_of(impl_id).subst_identity();
+ let self_ty = cx.tcx.type_of(impl_id).instantiate_identity();
if self_ty.is_slice() || self_ty.is_str();
then {
// Ignore empty slice and string literals when used with a literal count.
diff --git a/src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs b/src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs
index e818f1892..9eb8d6e6e 100644
--- a/src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs
@@ -5,7 +5,8 @@ use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
-use rustc_middle::ty::{self, print::with_forced_trimmed_paths};
+use rustc_middle::ty::print::with_forced_trimmed_paths;
+use rustc_middle::ty::{self};
use rustc_span::sym;
use super::SUSPICIOUS_TO_OWNED;
diff --git a/src/tools/clippy/clippy_lints/src/methods/type_id_on_box.rs b/src/tools/clippy/clippy_lints/src/methods/type_id_on_box.rs
new file mode 100644
index 000000000..3404bdfe7
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/type_id_on_box.rs
@@ -0,0 +1,62 @@
+use crate::methods::TYPE_ID_ON_BOX;
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::snippet;
+use rustc_errors::Applicability;
+use rustc_hir::Expr;
+use rustc_lint::LateContext;
+use rustc_middle::ty::adjustment::{Adjust, Adjustment};
+use rustc_middle::ty::{self, ExistentialPredicate, Ty};
+use rustc_span::{sym, Span};
+
+fn is_dyn_any(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
+ if let ty::Dynamic(preds, ..) = ty.kind() {
+ preds.iter().any(|p| match p.skip_binder() {
+ ExistentialPredicate::Trait(tr) => cx.tcx.is_diagnostic_item(sym::Any, tr.def_id),
+ _ => false,
+ })
+ } else {
+ false
+ }
+}
+
+pub(super) fn check(cx: &LateContext<'_>, receiver: &Expr<'_>, call_span: Span) {
+ let recv_adjusts = cx.typeck_results().expr_adjustments(receiver);
+
+ if let Some(Adjustment { target: recv_ty, .. }) = recv_adjusts.last()
+ && let ty::Ref(_, ty, _) = recv_ty.kind()
+ && let ty::Adt(adt, args) = ty.kind()
+ && adt.is_box()
+ && is_dyn_any(cx, args.type_at(0))
+ {
+ span_lint_and_then(
+ cx,
+ TYPE_ID_ON_BOX,
+ call_span,
+ "calling `.type_id()` on a `Box<dyn Any>`",
+ |diag| {
+ let derefs = recv_adjusts
+ .iter()
+ .filter(|adj| matches!(adj.kind, Adjust::Deref(None)))
+ .count();
+
+ let mut sugg = "*".repeat(derefs + 1);
+ sugg += &snippet(cx, receiver.span, "<expr>");
+
+ diag.note(
+ "this returns the type id of the literal type `Box<dyn Any>` instead of the \
+ type id of the boxed value, which is most likely not what you want"
+ )
+ .note(
+ "if this is intentional, use `TypeId::of::<Box<dyn Any>>()` instead, \
+ which makes it more clear"
+ )
+ .span_suggestion(
+ receiver.span,
+ "consider dereferencing first",
+ format!("({sugg})"),
+ Applicability::MaybeIncorrect,
+ );
+ },
+ );
+ }
+}
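
[Reviewer note, not part of the patch] A sketch of the footgun the notes above describe:

```rust
use std::any::{Any, TypeId};

fn type_id_example(val: Box<dyn Any>) -> (TypeId, TypeId) {
    // Flagged: this is the `TypeId` of `Box<dyn Any>` itself, not of the boxed value.
    let of_box = val.type_id();
    // Suggested: dereference first so the boxed value's type id is taken.
    let of_value = (*val).type_id();
    (of_box, of_value)
}
```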
diff --git a/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs b/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs
index a1c629473..bc9c518db 100644
--- a/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs
@@ -1,5 +1,6 @@
use clippy_utils::diagnostics::span_lint;
-use clippy_utils::{is_path_diagnostic_item, ty::is_uninit_value_valid_for_ty};
+use clippy_utils::is_path_diagnostic_item;
+use clippy_utils::ty::is_uninit_value_valid_for_ty;
use if_chain::if_chain;
use rustc_hir as hir;
use rustc_lint::LateContext;
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs
index 1cef6226a..fabf3fa0c 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs
@@ -11,8 +11,7 @@ use rustc_lint::LateContext;
use rustc_middle::ty;
use rustc_span::sym;
-use super::UNNECESSARY_FILTER_MAP;
-use super::UNNECESSARY_FIND_MAP;
+use super::{UNNECESSARY_FILTER_MAP, UNNECESSARY_FIND_MAP};
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>, arg: &'tcx hir::Expr<'tcx>, name: &str) {
if !is_trait_method(cx, expr, sym::Iterator) {
@@ -78,6 +77,16 @@ fn check_expression<'tcx>(cx: &LateContext<'tcx>, arg_id: hir::HirId, expr: &'tc
}
(true, true)
},
+ hir::ExprKind::MethodCall(segment, recv, [arg], _) => {
+ if segment.ident.name == sym!(then_some)
+ && cx.typeck_results().expr_ty(recv).is_bool()
+ && path_to_local_id(arg, arg_id)
+ {
+ (false, true)
+ } else {
+ (true, true)
+ }
+ },
hir::ExprKind::Block(block, _) => block
.expr
.as_ref()
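
[Reviewer note, not part of the patch] Assuming the reading of the new `then_some` arm above is right (it counts as filtering but not mapping), a `filter_map` like this should now be reported as reducible to `filter`:

```rust
fn filter_map_example() -> Vec<u32> {
    // Only filters, never maps: the closure returns its argument unchanged inside `then_some`.
    (0..10).filter_map(|x| (x % 2 == 0).then_some(x)).collect()
}
```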
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs
index 8ec15a1c1..6e23754bf 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs
@@ -8,7 +8,8 @@ use rustc_hir as hir;
use rustc_hir::PatKind;
use rustc_lint::LateContext;
use rustc_middle::ty;
-use rustc_span::{source_map::Span, sym};
+use rustc_span::source_map::Span;
+use rustc_span::sym;
use super::UNNECESSARY_FOLD;
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs
index 52a4ff7d1..0c72c13a3 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs
@@ -5,7 +5,8 @@ use clippy_utils::source::snippet_opt;
use clippy_utils::ty::{get_iterator_item_ty, implements_trait};
use clippy_utils::{fn_def_id, get_parent_expr};
use rustc_errors::Applicability;
-use rustc_hir::{def_id::DefId, Expr, ExprKind};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_span::{sym, Symbol};
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs
index 087e1e434..d0c62fb56 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs
@@ -1,4 +1,5 @@
-use clippy_utils::{diagnostics::span_lint_and_sugg, ty::is_type_lang_item};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::ty::is_type_lang_item;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, LangItem};
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_literal_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_literal_unwrap.rs
index ea9b894b6..937aac8d2 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_literal_unwrap.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_literal_unwrap.rs
@@ -1,4 +1,5 @@
-use clippy_utils::{diagnostics::span_lint_and_then, is_res_lang_ctor, last_path_segment, path_res, MaybePath};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::{is_res_lang_ctor, last_path_segment, path_res, MaybePath};
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
@@ -32,6 +33,11 @@ pub(super) fn check(
args: &[hir::Expr<'_>],
) {
let init = clippy_utils::expr_or_init(cx, recv);
+ if init.span.from_expansion() {
+ // don't lint if the receiver or binding initializer comes from a macro
+ // (e.g. `let x = option_env!(..); x.unwrap()`)
+ return;
+ }
let (constructor, call_args, ty) = if let hir::ExprKind::Call(call, call_args) = init.kind {
let Some(qpath) = call.qpath_opt() else { return };
@@ -65,6 +71,22 @@ pub(super) fn check(
(expr.span.with_hi(args[0].span.lo()), "panic!(".to_string()),
(expr.span.with_lo(args[0].span.hi()), ")".to_string()),
]),
+ ("Some" | "Ok", "unwrap_unchecked", _) | ("Err", "unwrap_err_unchecked", _) => {
+ let mut suggs = vec![
+ (recv.span.with_hi(call_args[0].span.lo()), String::new()),
+ (expr.span.with_lo(call_args[0].span.hi()), String::new()),
+ ];
+ // try to also remove the unsafe block if present
+ if let hir::Node::Block(block) = cx.tcx.hir().get_parent(expr.hir_id)
+ && let hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::UserProvided) = block.rules
+ {
+ suggs.extend([
+ (block.span.shrink_to_lo().to(expr.span.shrink_to_lo()), String::new()),
+ (expr.span.shrink_to_hi().to(block.span.shrink_to_hi()), String::new())
+ ]);
+ }
+ Some(suggs)
+ },
("None", "unwrap_or_default", _) => {
let ty = cx.typeck_results().expr_ty(expr);
let default_ty_string = if let ty::Adt(def, ..) = ty.kind() {
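
[Reviewer note, not part of the patch] A sketch of the new `unwrap_unchecked` arm above, which also strips a user-written `unsafe` block from the suggestion:

```rust
fn literal_unwrap_example() -> u32 {
    // Flagged: the receiver is a literal `Some`, so the suggested rewrite collapses
    // `unsafe { Some(1).unwrap_unchecked() }` down to just `1`.
    unsafe { Some(1).unwrap_unchecked() }
}
```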
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs
index 67618f703..e62a65a27 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs
@@ -6,7 +6,7 @@ use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Closure, Expr, ExprKind, Mutability, Param, Pat, PatKind, Path, PathSegment, QPath};
use rustc_lint::LateContext;
-use rustc_middle::ty::{self, subst::GenericArgKind};
+use rustc_middle::ty::{self, GenericArgKind};
use rustc_span::sym;
use rustc_span::symbol::Ident;
use std::iter;
@@ -118,7 +118,7 @@ fn detect_lint(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, arg: &Exp
if_chain! {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- if cx.tcx.type_of(impl_id).subst_identity().is_slice();
+ if cx.tcx.type_of(impl_id).instantiate_identity().is_slice();
if let ExprKind::Closure(&Closure { body, .. }) = arg.kind;
if let closure_body = cx.tcx.hir().body(body);
if let &[
diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs
index 6bd5e9e88..5c5ee2620 100644
--- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs
@@ -7,16 +7,20 @@ use clippy_utils::ty::{get_iterator_item_ty, implements_trait, is_copy, peel_mid
use clippy_utils::visitors::find_all_ret_expressions;
use clippy_utils::{fn_def_id, get_parent_expr, is_diag_item_method, is_diag_trait_item, return_ty};
use rustc_errors::Applicability;
-use rustc_hir::{def_id::DefId, BorrowKind, Expr, ExprKind, ItemKind, Node};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{BorrowKind, Expr, ExprKind, ItemKind, Node};
use rustc_hir_typeck::{FnCtxt, Inherited};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::LateContext;
use rustc_middle::mir::Mutability;
use rustc_middle::ty::adjustment::{Adjust, Adjustment, OverloadedDeref};
-use rustc_middle::ty::subst::{GenericArg, GenericArgKind, SubstsRef};
-use rustc_middle::ty::{self, ClauseKind, EarlyBinder, ParamTy, ProjectionPredicate, TraitPredicate, Ty};
+use rustc_middle::ty::{
+ self, ClauseKind, EarlyBinder, GenericArg, GenericArgKind, GenericArgsRef, ParamTy, ProjectionPredicate,
+ TraitPredicate, Ty,
+};
use rustc_span::{sym, Symbol};
-use rustc_trait_selection::traits::{query::evaluate_obligation::InferCtxtExt as _, Obligation, ObligationCause};
+use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _;
+use rustc_trait_selection::traits::{Obligation, ObligationCause};
use super::UNNECESSARY_TO_OWNED;
@@ -250,8 +254,8 @@ fn check_other_call_arg<'tcx>(
) -> bool {
if_chain! {
if let Some((maybe_call, maybe_arg)) = skip_addr_of_ancestors(cx, expr);
- if let Some((callee_def_id, _, recv, call_args)) = get_callee_substs_and_args(cx, maybe_call);
- let fn_sig = cx.tcx.fn_sig(callee_def_id).subst_identity().skip_binder();
+ if let Some((callee_def_id, _, recv, call_args)) = get_callee_generic_args_and_args(cx, maybe_call);
+ let fn_sig = cx.tcx.fn_sig(callee_def_id).instantiate_identity().skip_binder();
if let Some(i) = recv.into_iter().chain(call_args).position(|arg| arg.hir_id == maybe_arg.hir_id);
if let Some(input) = fn_sig.inputs().get(i);
let (input, n_refs) = peel_mid_ty_refs(*input);
@@ -315,26 +319,31 @@ fn skip_addr_of_ancestors<'tcx>(
}
/// Checks whether an expression is a function or method call and, if so, returns its `DefId`,
-/// `Substs`, and arguments.
-fn get_callee_substs_and_args<'tcx>(
+/// `GenericArgs`, and arguments.
+fn get_callee_generic_args_and_args<'tcx>(
cx: &LateContext<'tcx>,
expr: &'tcx Expr<'tcx>,
-) -> Option<(DefId, SubstsRef<'tcx>, Option<&'tcx Expr<'tcx>>, &'tcx [Expr<'tcx>])> {
+) -> Option<(
+ DefId,
+ GenericArgsRef<'tcx>,
+ Option<&'tcx Expr<'tcx>>,
+ &'tcx [Expr<'tcx>],
+)> {
if_chain! {
if let ExprKind::Call(callee, args) = expr.kind;
let callee_ty = cx.typeck_results().expr_ty(callee);
if let ty::FnDef(callee_def_id, _) = callee_ty.kind();
then {
- let substs = cx.typeck_results().node_substs(callee.hir_id);
- return Some((*callee_def_id, substs, None, args));
+ let generic_args = cx.typeck_results().node_args(callee.hir_id);
+ return Some((*callee_def_id, generic_args, None, args));
}
}
if_chain! {
if let ExprKind::MethodCall(_, recv, args, _) = expr.kind;
if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
then {
- let substs = cx.typeck_results().node_substs(expr.hir_id);
- return Some((method_def_id, substs, Some(recv), args));
+ let generic_args = cx.typeck_results().node_args(expr.hir_id);
+ return Some((method_def_id, generic_args, Some(recv), args));
}
}
None
@@ -388,17 +397,18 @@ fn can_change_type<'a>(cx: &LateContext<'a>, mut expr: &'a Expr<'a>, mut ty: Ty<
}
}
Node::Expr(parent_expr) => {
- if let Some((callee_def_id, call_substs, recv, call_args)) = get_callee_substs_and_args(cx, parent_expr)
+ if let Some((callee_def_id, call_generic_args, recv, call_args))
+ = get_callee_generic_args_and_args(cx, parent_expr)
{
- // FIXME: the `subst_identity()` below seems incorrect, since we eventually
+ // FIXME: the `instantiate_identity()` below seems incorrect, since we eventually
// call `tcx.try_subst_and_normalize_erasing_regions` further down
// (i.e., we are explicitly not in the identity context).
- let fn_sig = cx.tcx.fn_sig(callee_def_id).subst_identity().skip_binder();
+ let fn_sig = cx.tcx.fn_sig(callee_def_id).instantiate_identity().skip_binder();
if let Some(arg_index) = recv.into_iter().chain(call_args).position(|arg| arg.hir_id == expr.hir_id)
&& let Some(param_ty) = fn_sig.inputs().get(arg_index)
&& let ty::Param(ParamTy { index: param_index , ..}) = param_ty.kind()
// https://github.com/rust-lang/rust-clippy/issues/9504 and https://github.com/rust-lang/rust-clippy/issues/10021
- && (*param_index as usize) < call_substs.len()
+ && (*param_index as usize) < call_generic_args.len()
{
if fn_sig
.inputs()
@@ -422,8 +432,8 @@ fn can_change_type<'a>(cx: &LateContext<'a>, mut expr: &'a Expr<'a>, mut ty: Ty<
}
});
- let new_subst = cx.tcx.mk_substs_from_iter(
- call_substs.iter()
+ let new_subst = cx.tcx.mk_args_from_iter(
+ call_generic_args.iter()
.enumerate()
.map(|(i, t)|
if i == (*param_index as usize) {
@@ -433,7 +443,7 @@ fn can_change_type<'a>(cx: &LateContext<'a>, mut expr: &'a Expr<'a>, mut ty: Ty<
}));
if trait_predicates.any(|predicate| {
- let predicate = EarlyBinder::bind(predicate).subst(cx.tcx, new_subst);
+ let predicate = EarlyBinder::bind(predicate).instantiate(cx.tcx, new_subst);
let obligation = Obligation::new(cx.tcx, ObligationCause::dummy(), cx.param_env, predicate);
!cx.tcx.infer_ctxt().build().predicate_must_hold_modulo_regions(&obligation)
}) {
@@ -500,8 +510,8 @@ fn is_to_string_on_string_like<'a>(
return false;
}
- if let Some(substs) = cx.typeck_results().node_substs_opt(call_expr.hir_id)
- && let [generic_arg] = substs.as_slice()
+ if let Some(args) = cx.typeck_results().node_args_opt(call_expr.hir_id)
+ && let [generic_arg] = args.as_slice()
&& let GenericArgKind::Type(ty) = generic_arg.unpack()
&& let Some(deref_trait_id) = cx.tcx.get_diagnostic_item(sym::Deref)
&& let Some(as_ref_trait_id) = cx.tcx.get_diagnostic_item(sym::AsRef)
diff --git a/src/tools/clippy/clippy_lints/src/methods/unwrap_expect_used.rs b/src/tools/clippy/clippy_lints/src/methods/unwrap_expect_used.rs
new file mode 100644
index 000000000..7bd16b473
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/methods/unwrap_expect_used.rs
@@ -0,0 +1,83 @@
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::ty::{is_never_like, is_type_diagnostic_item};
+use clippy_utils::{is_in_cfg_test, is_in_test_function, is_lint_allowed};
+use rustc_hir::Expr;
+use rustc_lint::{LateContext, Lint};
+use rustc_middle::ty;
+use rustc_span::sym;
+
+use super::{EXPECT_USED, UNWRAP_USED};
+
+#[derive(Clone, Copy, Eq, PartialEq)]
+pub(super) enum Variant {
+ Unwrap,
+ Expect,
+}
+
+impl Variant {
+ fn method_name(self, is_err: bool) -> &'static str {
+ match (self, is_err) {
+ (Variant::Unwrap, true) => "unwrap_err",
+ (Variant::Unwrap, false) => "unwrap",
+ (Variant::Expect, true) => "expect_err",
+ (Variant::Expect, false) => "expect",
+ }
+ }
+
+ fn lint(self) -> &'static Lint {
+ match self {
+ Variant::Unwrap => UNWRAP_USED,
+ Variant::Expect => EXPECT_USED,
+ }
+ }
+}
+
+/// Lint usage of `unwrap` or `unwrap_err` for `Result` and `unwrap()` for `Option` (and their
+/// `expect` counterparts).
+pub(super) fn check(
+ cx: &LateContext<'_>,
+ expr: &Expr<'_>,
+ recv: &Expr<'_>,
+ is_err: bool,
+ allow_unwrap_in_tests: bool,
+ variant: Variant,
+) {
+ let ty = cx.typeck_results().expr_ty(recv).peel_refs();
+
+ let (kind, none_value, none_prefix) = if is_type_diagnostic_item(cx, ty, sym::Option) && !is_err {
+ ("an `Option`", "None", "")
+ } else if is_type_diagnostic_item(cx, ty, sym::Result)
+ && let ty::Adt(_, substs) = ty.kind()
+ && let Some(t_or_e_ty) = substs[usize::from(!is_err)].as_type()
+ {
+ if is_never_like(t_or_e_ty) {
+ return;
+ }
+
+ ("a `Result`", if is_err { "Ok" } else { "Err" }, "an ")
+ } else {
+ return;
+ };
+
+ let method_suffix = if is_err { "_err" } else { "" };
+
+ if allow_unwrap_in_tests && (is_in_test_function(cx.tcx, expr.hir_id) || is_in_cfg_test(cx.tcx, expr.hir_id)) {
+ return;
+ }
+
+ span_lint_and_then(
+ cx,
+ variant.lint(),
+ expr.span,
+ &format!("used `{}()` on {kind} value", variant.method_name(is_err)),
+ |diag| {
+ diag.note(format!("if this value is {none_prefix}`{none_value}`, it will panic"));
+
+ if variant == Variant::Unwrap && is_lint_allowed(cx, EXPECT_USED, expr.hir_id) {
+ diag.help(format!(
+ "consider using `expect{method_suffix}()` to provide a better panic message"
+ ));
+ }
+ },
+ );
+}
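
[Reviewer note, not part of the patch] Behaviour of the merged check, under the assumption that `std::convert::Infallible` counts as never-like for the early return above:

```rust
fn unwrap_used_example(opt: Option<u32>, res: Result<u32, std::convert::Infallible>) -> u32 {
    // Flagged by `unwrap_used`: "used `unwrap()` on an `Option` value".
    let a = opt.unwrap();
    // Assumed not flagged: the error type is never-like, so `check` returns early.
    let b = res.unwrap();
    a + b
}
```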
diff --git a/src/tools/clippy/clippy_lints/src/methods/unwrap_or_else_default.rs b/src/tools/clippy/clippy_lints/src/methods/unwrap_or_else_default.rs
deleted file mode 100644
index 045f739e6..000000000
--- a/src/tools/clippy/clippy_lints/src/methods/unwrap_or_else_default.rs
+++ /dev/null
@@ -1,66 +0,0 @@
-//! Lint for `some_result_or_option.unwrap_or_else(Default::default)`
-
-use super::UNWRAP_OR_ELSE_DEFAULT;
-use clippy_utils::{
- diagnostics::span_lint_and_sugg, is_default_equivalent_call, source::snippet_with_applicability,
- ty::is_type_diagnostic_item,
-};
-use rustc_ast::ast::LitKind;
-use rustc_errors::Applicability;
-use rustc_hir as hir;
-use rustc_lint::LateContext;
-use rustc_span::{sym, symbol};
-
-pub(super) fn check<'tcx>(
- cx: &LateContext<'tcx>,
- expr: &'tcx hir::Expr<'_>,
- recv: &'tcx hir::Expr<'_>,
- u_arg: &'tcx hir::Expr<'_>,
-) {
- // something.unwrap_or_else(Default::default)
- // ^^^^^^^^^- recv ^^^^^^^^^^^^^^^^- u_arg
- // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- expr
- let recv_ty = cx.typeck_results().expr_ty(recv);
- let is_option = is_type_diagnostic_item(cx, recv_ty, sym::Option);
- let is_result = is_type_diagnostic_item(cx, recv_ty, sym::Result);
-
- if_chain! {
- if is_option || is_result;
- if closure_body_returns_empty_to_string(cx, u_arg) || is_default_equivalent_call(cx, u_arg);
- then {
- let mut applicability = Applicability::MachineApplicable;
-
- span_lint_and_sugg(
- cx,
- UNWRAP_OR_ELSE_DEFAULT,
- expr.span,
- "use of `.unwrap_or_else(..)` to construct default value",
- "try",
- format!(
- "{}.unwrap_or_default()",
- snippet_with_applicability(cx, recv.span, "..", &mut applicability)
- ),
- applicability,
- );
- }
- }
-}
-
-fn closure_body_returns_empty_to_string(cx: &LateContext<'_>, e: &hir::Expr<'_>) -> bool {
- if let hir::ExprKind::Closure(&hir::Closure { body, .. }) = e.kind {
- let body = cx.tcx.hir().body(body);
-
- if body.params.is_empty()
- && let hir::Expr{ kind, .. } = &body.value
- && let hir::ExprKind::MethodCall(hir::PathSegment {ident, ..}, self_arg, _, _) = kind
- && ident == &symbol::Ident::from_str("to_string")
- && let hir::Expr{ kind, .. } = self_arg
- && let hir::ExprKind::Lit(lit) = kind
- && let LitKind::Str(symbol::kw::Empty, _) = lit.node
- {
- return true;
- }
- }
-
- false
-}
diff --git a/src/tools/clippy/clippy_lints/src/methods/unwrap_used.rs b/src/tools/clippy/clippy_lints/src/methods/unwrap_used.rs
deleted file mode 100644
index 5e4c3daee..000000000
--- a/src/tools/clippy/clippy_lints/src/methods/unwrap_used.rs
+++ /dev/null
@@ -1,53 +0,0 @@
-use clippy_utils::diagnostics::span_lint_and_help;
-use clippy_utils::ty::is_type_diagnostic_item;
-use clippy_utils::{is_in_cfg_test, is_in_test_function, is_lint_allowed};
-use rustc_hir as hir;
-use rustc_lint::LateContext;
-use rustc_span::sym;
-
-use super::{EXPECT_USED, UNWRAP_USED};
-
-/// lint use of `unwrap()` or `unwrap_err` for `Result` and `unwrap()` for `Option`.
-pub(super) fn check(
- cx: &LateContext<'_>,
- expr: &hir::Expr<'_>,
- recv: &hir::Expr<'_>,
- is_err: bool,
- allow_unwrap_in_tests: bool,
-) {
- let obj_ty = cx.typeck_results().expr_ty(recv).peel_refs();
-
- let mess = if is_type_diagnostic_item(cx, obj_ty, sym::Option) && !is_err {
- Some((UNWRAP_USED, "an `Option`", "None", ""))
- } else if is_type_diagnostic_item(cx, obj_ty, sym::Result) {
- Some((UNWRAP_USED, "a `Result`", if is_err { "Ok" } else { "Err" }, "an "))
- } else {
- None
- };
-
- let method_suffix = if is_err { "_err" } else { "" };
-
- if allow_unwrap_in_tests && (is_in_test_function(cx.tcx, expr.hir_id) || is_in_cfg_test(cx.tcx, expr.hir_id)) {
- return;
- }
-
- if let Some((lint, kind, none_value, none_prefix)) = mess {
- let help = if is_lint_allowed(cx, EXPECT_USED, expr.hir_id) {
- format!(
- "if you don't want to handle the `{none_value}` case gracefully, consider \
- using `expect{method_suffix}()` to provide a better panic message"
- )
- } else {
- format!("if this value is {none_prefix}`{none_value}`, it will panic")
- };
-
- span_lint_and_help(
- cx,
- lint,
- expr.span,
- &format!("used `unwrap{method_suffix}()` on {kind} value"),
- None,
- &help,
- );
- }
-}
diff --git a/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs b/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs
index c1139d84e..b5f810edd 100644
--- a/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs
@@ -37,7 +37,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, call_name: &str,
USELESS_ASREF,
expr.span,
&format!("this call to `{call_name}` does nothing"),
- "try this",
+ "try",
snippet_with_applicability(cx, recvr.span, "..", &mut applicability).to_string(),
applicability,
);
diff --git a/src/tools/clippy/clippy_lints/src/methods/utils.rs b/src/tools/clippy/clippy_lints/src/methods/utils.rs
index c96d69226..9f1f73e60 100644
--- a/src/tools/clippy/clippy_lints/src/methods/utils.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/utils.rs
@@ -143,7 +143,7 @@ impl<'cx, 'tcx> Visitor<'tcx> for CloneOrCopyVisitor<'cx, 'tcx> {
if_chain! {
if args.iter().all(|arg| !self.is_binding(arg));
if let Some(method_def_id) = self.cx.typeck_results().type_dependent_def_id(parent.hir_id);
- let method_ty = self.cx.tcx.type_of(method_def_id).subst_identity();
+ let method_ty = self.cx.tcx.type_of(method_def_id).instantiate_identity();
let self_ty = method_ty.fn_sig(self.cx.tcx).input(0).skip_binder();
if matches!(self_ty.kind(), ty::Ref(_, _, Mutability::Not));
then {
diff --git a/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs b/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs
index b0cfc163f..730727186 100644
--- a/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs
+++ b/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs
@@ -20,7 +20,7 @@ pub(super) fn check<'tcx>(
if_chain! {
if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
if let Some(impl_id) = cx.tcx.impl_of_method(method_id);
- if is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).subst_identity(), sym::Vec);
+ if is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Vec);
if let ExprKind::Lit(Spanned { node: LitKind::Int(0, _), .. }) = count_arg.kind;
if let ExprKind::Lit(Spanned { node: LitKind::Int(..), .. }) = default_arg.kind;
then {
diff --git a/src/tools/clippy/clippy_lints/src/min_ident_chars.rs b/src/tools/clippy/clippy_lints/src/min_ident_chars.rs
index d49bb0ca6..c79a1a7b9 100644
--- a/src/tools/clippy/clippy_lints/src/min_ident_chars.rs
+++ b/src/tools/clippy/clippy_lints/src/min_ident_chars.rs
@@ -1,10 +1,9 @@
-use clippy_utils::{diagnostics::span_lint, is_from_proc_macro};
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::is_from_proc_macro;
use rustc_data_structures::fx::FxHashSet;
-use rustc_hir::{
- def::{DefKind, Res},
- intravisit::{walk_item, Visitor},
- GenericParamKind, HirId, Item, ItemKind, ItemLocalId, Node, Pat, PatKind,
-};
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::intravisit::{walk_item, Visitor};
+use rustc_hir::{GenericParamKind, HirId, Item, ItemKind, ItemLocalId, Node, Pat, PatKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_tool_lint, impl_lint_pass};
@@ -25,7 +24,7 @@ declare_clippy_lint! {
/// ### Example
/// ```rust,ignore
/// for m in movies {
- /// let title = m.t;
+ /// let title = m.t;
/// }
/// ```
/// Use instead:
@@ -130,6 +129,14 @@ impl Visitor<'_> for IdentVisitor<'_, '_> {
return;
}
+ // `struct Array<T, const N: usize>([T; N])`
+ // ^
+ if let Node::GenericParam(generic_param) = node
+ && let GenericParamKind::Const { .. } = generic_param.kind
+ {
+ return;
+ }
+
if is_from_proc_macro(cx, &ident) {
return;
}
diff --git a/src/tools/clippy/clippy_lints/src/missing_assert_message.rs b/src/tools/clippy/clippy_lints/src/missing_assert_message.rs
index 4dbb79334..c17f00c42 100644
--- a/src/tools/clippy/clippy_lints/src/missing_assert_message.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_assert_message.rs
@@ -46,7 +46,9 @@ declare_lint_pass!(MissingAssertMessage => [MISSING_ASSERT_MESSAGE]);
impl<'tcx> LateLintPass<'tcx> for MissingAssertMessage {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return };
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else {
+ return;
+ };
let single_argument = match cx.tcx.get_diagnostic_name(macro_call.def_id) {
Some(sym::assert_macro | sym::debug_assert_macro) => true,
Some(
@@ -61,10 +63,14 @@ impl<'tcx> LateLintPass<'tcx> for MissingAssertMessage {
}
let panic_expn = if single_argument {
- let Some((_, panic_expn)) = find_assert_args(cx, expr, macro_call.expn) else { return };
+ let Some((_, panic_expn)) = find_assert_args(cx, expr, macro_call.expn) else {
+ return;
+ };
panic_expn
} else {
- let Some((_, _, panic_expn)) = find_assert_eq_args(cx, expr, macro_call.expn) else { return };
+ let Some((_, _, panic_expn)) = find_assert_eq_args(cx, expr, macro_call.expn) else {
+ return;
+ };
panic_expn
};
diff --git a/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs b/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs
index 773174679..96d83e114 100644
--- a/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs
@@ -1,8 +1,11 @@
-use clippy_utils::{diagnostics::span_lint_and_sugg, source::snippet_opt};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_opt;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::Applicability;
-use rustc_hir::{def::Res, def_id::DefId, Item, ItemKind, UseKind};
+use rustc_hir::def::Res;
+use rustc_hir::def_id::DefId;
+use rustc_hir::{Item, ItemKind, UseKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::Symbol;
diff --git a/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs b/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs
index 497514fbc..2f63b9b9f 100644
--- a/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs
@@ -1,23 +1,17 @@
use std::ops::ControlFlow;
-use clippy_utils::{
- diagnostics::span_lint_and_then,
- is_path_lang_item, paths,
- ty::match_type,
- visitors::{for_each_expr, Visitable},
-};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::ty::match_type;
+use clippy_utils::visitors::{for_each_expr, Visitable};
+use clippy_utils::{is_path_lang_item, paths};
use rustc_ast::LitKind;
use rustc_data_structures::fx::FxHashSet;
-use rustc_hir::Block;
+use rustc_hir::def::{DefKind, Res};
use rustc_hir::{
- def::{DefKind, Res},
- Expr, ImplItemKind, LangItem, Node,
+ Block, Expr, ExprKind, Impl, ImplItem, ImplItemKind, Item, ItemKind, LangItem, Node, QPath, TyKind, VariantData,
};
-use rustc_hir::{ExprKind, Impl, ItemKind, QPath, TyKind};
-use rustc_hir::{ImplItem, Item, VariantData};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_middle::ty::Ty;
-use rustc_middle::ty::TypeckResults;
+use rustc_middle::ty::{Ty, TypeckResults};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::{sym, Span, Symbol};
diff --git a/src/tools/clippy/clippy_lints/src/missing_inline.rs b/src/tools/clippy/clippy_lints/src/missing_inline.rs
index a41d5a9ce..93f6025c7 100644
--- a/src/tools/clippy/clippy_lints/src/missing_inline.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_inline.rs
@@ -74,7 +74,6 @@ fn is_executable_or_proc_macro(cx: &LateContext<'_>) -> bool {
use rustc_session::config::CrateType;
cx.tcx
- .sess
.crate_types()
.iter()
.any(|t: &CrateType| matches!(t, CrateType::Executable | CrateType::ProcMacro))
diff --git a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs
index 57ec3a1f1..367cd6bd4 100644
--- a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs
+++ b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs
@@ -239,7 +239,7 @@ fn check_expr<'tcx>(vis: &mut ReadVisitor<'_, 'tcx>, expr: &'tcx Expr<'_>) -> St
| ExprKind::MethodCall(..)
| ExprKind::Call(_, _)
| ExprKind::Assign(..)
- | ExprKind::Index(_, _)
+ | ExprKind::Index(..)
| ExprKind::Repeat(_, _)
| ExprKind::Struct(_, _, _) => {
walk_expr(vis, expr);
diff --git a/src/tools/clippy/clippy_lints/src/module_style.rs b/src/tools/clippy/clippy_lints/src/module_style.rs
index 439cae812..efdc7560e 100644
--- a/src/tools/clippy/clippy_lints/src/module_style.rs
+++ b/src/tools/clippy/clippy_lints/src/module_style.rs
@@ -80,7 +80,9 @@ impl EarlyLintPass for ModStyle {
let files = cx.sess().source_map().files();
- let Some(trim_to_src) = cx.sess().opts.working_dir.local_path() else { return };
+ let Some(trim_to_src) = cx.sess().opts.working_dir.local_path() else {
+ return;
+ };
// `folder_segments` is all unique folder path segments `path/to/foo.rs` gives
// `[path, to]` but not foo
diff --git a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs
index e6fd65f00..fe35126aa 100644
--- a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs
+++ b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs
@@ -1,12 +1,8 @@
-use clippy_utils::{
- diagnostics::span_lint_and_then,
- visitors::{for_each_expr_with_closures, Descend, Visitable},
-};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::visitors::{for_each_expr_with_closures, Descend, Visitable};
use core::ops::ControlFlow::Continue;
-use hir::{
- def::{DefKind, Res},
- BlockCheckMode, ExprKind, QPath, UnOp, Unsafety,
-};
+use hir::def::{DefKind, Res};
+use hir::{BlockCheckMode, ExprKind, QPath, UnOp, Unsafety};
use rustc_ast::Mutability;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
diff --git a/src/tools/clippy/clippy_lints/src/mut_key.rs b/src/tools/clippy/clippy_lints/src/mut_key.rs
index 309f67521..5878f8995 100644
--- a/src/tools/clippy/clippy_lints/src/mut_key.rs
+++ b/src/tools/clippy/clippy_lints/src/mut_key.rs
@@ -139,7 +139,7 @@ impl MutableKeyType {
}
fn check_sig(&self, cx: &LateContext<'_>, fn_def_id: LocalDefId, decl: &hir::FnDecl<'_>) {
- let fn_sig = cx.tcx.fn_sig(fn_def_id).subst_identity();
+ let fn_sig = cx.tcx.fn_sig(fn_def_id).instantiate_identity();
for (hir_ty, ty) in iter::zip(decl.inputs, fn_sig.inputs().skip_binder()) {
self.check_ty_(cx, hir_ty.span, *ty);
}
@@ -150,7 +150,7 @@ impl MutableKeyType {
// generics (because the compiler cannot ensure immutability for unknown types).
fn check_ty_<'tcx>(&self, cx: &LateContext<'tcx>, span: Span, ty: Ty<'tcx>) {
let ty = ty.peel_refs();
- if let Adt(def, substs) = ty.kind() {
+ if let Adt(def, args) = ty.kind() {
let is_keyed_type = [sym::HashMap, sym::BTreeMap, sym::HashSet, sym::BTreeSet]
.iter()
.any(|diag_item| cx.tcx.is_diagnostic_item(*diag_item, def.did()));
@@ -158,7 +158,7 @@ impl MutableKeyType {
return;
}
- let subst_ty = substs.type_at(0);
+ let subst_ty = args.type_at(0);
// Determines if a type contains interior mutability which would affect its implementation of
// [`Hash`] or [`Ord`].
if is_interior_mut_ty(cx, subst_ty)
diff --git a/src/tools/clippy/clippy_lints/src/mut_reference.rs b/src/tools/clippy/clippy_lints/src/mut_reference.rs
index e91aac41b..e53e146ec 100644
--- a/src/tools/clippy/clippy_lints/src/mut_reference.rs
+++ b/src/tools/clippy/clippy_lints/src/mut_reference.rs
@@ -37,6 +37,11 @@ declare_lint_pass!(UnnecessaryMutPassed => [UNNECESSARY_MUT_PASSED]);
impl<'tcx> LateLintPass<'tcx> for UnnecessaryMutPassed {
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
+ if e.span.from_expansion() {
+ // Issue #11268
+ return;
+ }
+
match e.kind {
ExprKind::Call(fn_expr, arguments) => {
if let ExprKind::Path(ref path) = fn_expr.kind {
@@ -51,8 +56,8 @@ impl<'tcx> LateLintPass<'tcx> for UnnecessaryMutPassed {
},
ExprKind::MethodCall(path, receiver, arguments, _) => {
let def_id = cx.typeck_results().type_dependent_def_id(e.hir_id).unwrap();
- let substs = cx.typeck_results().node_substs(e.hir_id);
- let method_type = cx.tcx.type_of(def_id).subst(cx.tcx, substs);
+ let args = cx.typeck_results().node_args(e.hir_id);
+ let method_type = cx.tcx.type_of(def_id).instantiate(cx.tcx, args);
check_arguments(
cx,
std::iter::once(receiver).chain(arguments.iter()).collect(),
diff --git a/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs b/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs
index d8647a991..dea432fdb 100644
--- a/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs
+++ b/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs
@@ -39,7 +39,9 @@ declare_lint_pass!(DebugAssertWithMutCall => [DEBUG_ASSERT_WITH_MUT_CALL]);
impl<'tcx> LateLintPass<'tcx> for DebugAssertWithMutCall {
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
- let Some(macro_call) = root_macro_call_first_node(cx, e) else { return };
+ let Some(macro_call) = root_macro_call_first_node(cx, e) else {
+ return;
+ };
let macro_name = cx.tcx.item_name(macro_call.def_id);
if !matches!(
macro_name.as_str(),
@@ -47,7 +49,9 @@ impl<'tcx> LateLintPass<'tcx> for DebugAssertWithMutCall {
) {
return;
}
- let Some((lhs, rhs, _)) = find_assert_eq_args(cx, e, macro_call.expn) else { return };
+ let Some((lhs, rhs, _)) = find_assert_eq_args(cx, e, macro_call.expn) else {
+ return;
+ };
for arg in [lhs, rhs] {
let mut visitor = MutArgVisitor::new(cx);
visitor.visit_expr(arg);
diff --git a/src/tools/clippy/clippy_lints/src/needless_bool.rs b/src/tools/clippy/clippy_lints/src/needless_bool.rs
index 62af42a39..f6b87b071 100644
--- a/src/tools/clippy/clippy_lints/src/needless_bool.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_bool.rs
@@ -6,9 +6,9 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::sugg::Sugg;
use clippy_utils::{
- get_parent_node, is_else_clause, is_expn_of, peel_blocks, peel_blocks_with_stmt, span_extract_comment,
+ get_parent_node, higher, is_else_clause, is_expn_of, peel_blocks, peel_blocks_with_stmt, span_extract_comment,
+ SpanlessEq,
};
-use clippy_utils::{higher, SpanlessEq};
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Block, Expr, ExprKind, HirId, Node, UnOp};
@@ -106,7 +106,7 @@ declare_clippy_lint! {
/// # let mut skip: bool;
/// skip = !must_keep(x, y);
/// ```
- #[clippy::version = "1.69.0"]
+ #[clippy::version = "1.71.0"]
pub NEEDLESS_BOOL_ASSIGN,
complexity,
"setting the same boolean variable in both branches of an if-statement"
@@ -119,7 +119,7 @@ fn condition_needs_parentheses(e: &Expr<'_>) -> bool {
| ExprKind::Call(i, _)
| ExprKind::Cast(i, _)
| ExprKind::Type(i, _)
- | ExprKind::Index(i, _) = inner.kind
+ | ExprKind::Index(i, _, _) = inner.kind
{
if matches!(
i.kind,
diff --git a/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs b/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs
index 498e1408e..11bf9e9ca 100644
--- a/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs
@@ -52,7 +52,9 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessBorrowedRef {
}
// Only lint immutable refs, because `&mut ref T` may be useful.
- let PatKind::Ref(pat, Mutability::Not) = ref_pat.kind else { return };
+ let PatKind::Ref(pat, Mutability::Not) = ref_pat.kind else {
+ return;
+ };
match pat.kind {
// Check sub_pat got a `ref` keyword (excluding `ref mut`).
diff --git a/src/tools/clippy/clippy_lints/src/needless_else.rs b/src/tools/clippy/clippy_lints/src/needless_else.rs
index 4ff1bf7ff..03bab86c6 100644
--- a/src/tools/clippy/clippy_lints/src/needless_else.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_else.rs
@@ -1,5 +1,5 @@
-use clippy_utils::source::snippet_opt;
-use clippy_utils::{diagnostics::span_lint_and_sugg, source::trim_span};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::{snippet_opt, trim_span};
use rustc_ast::ast::{Expr, ExprKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
@@ -51,7 +51,7 @@ impl EarlyLintPass for NeedlessElse {
cx,
NEEDLESS_ELSE,
span,
- "this else branch is empty",
+ "this `else` branch is empty",
"you can remove it",
String::new(),
Applicability::MachineApplicable,
diff --git a/src/tools/clippy/clippy_lints/src/needless_for_each.rs b/src/tools/clippy/clippy_lints/src/needless_for_each.rs
index c3b633fd6..98bf122fa 100644
--- a/src/tools/clippy/clippy_lints/src/needless_for_each.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_for_each.rs
@@ -1,11 +1,10 @@
use rustc_errors::Applicability;
-use rustc_hir::{
- intravisit::{walk_expr, Visitor},
- Closure, Expr, ExprKind, Stmt, StmtKind,
-};
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{Closure, Expr, ExprKind, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::{source_map::Span, sym, Symbol};
+use rustc_span::source_map::Span;
+use rustc_span::{sym, Symbol};
use if_chain::if_chain;
@@ -50,7 +49,7 @@ declare_lint_pass!(NeedlessForEach => [NEEDLESS_FOR_EACH]);
impl<'tcx> LateLintPass<'tcx> for NeedlessForEach {
fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
let (StmtKind::Expr(expr) | StmtKind::Semi(expr)) = stmt.kind else {
- return
+ return;
};
if_chain! {
diff --git a/src/tools/clippy/clippy_lints/src/needless_if.rs b/src/tools/clippy/clippy_lints/src/needless_if.rs
index ad5c3e1dc..1ed7ea6b3 100644
--- a/src/tools/clippy/clippy_lints/src/needless_if.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_if.rs
@@ -1,4 +1,7 @@
-use clippy_utils::{diagnostics::span_lint_and_sugg, higher::If, is_from_proc_macro, source::snippet_opt};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::higher::If;
+use clippy_utils::is_from_proc_macro;
+use clippy_utils::source::snippet_opt;
use rustc_errors::Applicability;
use rustc_hir::{ExprKind, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
diff --git a/src/tools/clippy/clippy_lints/src/needless_late_init.rs b/src/tools/clippy/clippy_lints/src/needless_late_init.rs
index 5a9387b34..948454d13 100644
--- a/src/tools/clippy/clippy_lints/src/needless_late_init.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_late_init.rs
@@ -86,7 +86,9 @@ fn contains_let(cond: &Expr<'_>) -> bool {
}
fn stmt_needs_ordered_drop(cx: &LateContext<'_>, stmt: &Stmt<'_>) -> bool {
- let StmtKind::Local(local) = stmt.kind else { return false };
+ let StmtKind::Local(local) = stmt.kind else {
+ return false;
+ };
!local.pat.walk_short(|pat| {
if let PatKind::Binding(.., None) = pat.kind {
!needs_ordered_drop(cx, cx.typeck_results().pat_ty(pat))
diff --git a/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs b/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs
index da1b9d999..d17a383e8 100644
--- a/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs
@@ -1,8 +1,6 @@
-use clippy_utils::{
- diagnostics::span_lint_and_then,
- higher,
- source::{snippet, snippet_with_applicability},
-};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::higher;
+use clippy_utils::source::{snippet, snippet_with_applicability};
use rustc_ast::ast;
use rustc_errors::Applicability;
diff --git a/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs b/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs
new file mode 100644
index 000000000..7f0a5964a
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs
@@ -0,0 +1,441 @@
+use super::needless_pass_by_value::requires_exact_signature;
+use clippy_utils::diagnostics::span_lint_hir_and_then;
+use clippy_utils::source::snippet;
+use clippy_utils::{get_parent_node, inherits_cfg, is_from_proc_macro, is_self};
+use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
+use rustc_errors::Applicability;
+use rustc_hir::intravisit::{walk_qpath, FnKind, Visitor};
+use rustc_hir::{
+ Body, Closure, Expr, ExprKind, FnDecl, HirId, HirIdMap, HirIdSet, Impl, ItemKind, Mutability, Node, PatKind, QPath,
+};
+use rustc_hir_typeck::expr_use_visitor as euv;
+use rustc_infer::infer::TyCtxtInferExt;
+use rustc_lint::{LateContext, LateLintPass};
+use rustc_middle::hir::map::associated_body;
+use rustc_middle::hir::nested_filter::OnlyBodies;
+use rustc_middle::mir::FakeReadCause;
+use rustc_middle::ty::{self, Ty, TyCtxt, UpvarId, UpvarPath};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::def_id::LocalDefId;
+use rustc_span::symbol::kw;
+use rustc_span::Span;
+use rustc_target::spec::abi::Abi;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Check if a `&mut` function argument is actually used mutably.
+ ///
+ /// Be careful if the function is publicly reexported as it would break compatibility with
+ /// users of this function.
+ ///
+ /// ### Why is this bad?
+ /// Less `mut` means fewer fights with the borrow checker. It can also lead to more
+ /// opportunities for parallelization.
+ ///
+ /// ### Example
+ /// ```rust
+ /// fn foo(y: &mut i32) -> i32 {
+ /// 12 + *y
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// fn foo(y: &i32) -> i32 {
+ /// 12 + *y
+ /// }
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub NEEDLESS_PASS_BY_REF_MUT,
+ nursery,
+ "using a `&mut` argument when it's not mutated"
+}
+
+#[derive(Clone)]
+pub struct NeedlessPassByRefMut<'tcx> {
+ avoid_breaking_exported_api: bool,
+ used_fn_def_ids: FxHashSet<LocalDefId>,
+ fn_def_ids_to_maybe_unused_mut: FxIndexMap<LocalDefId, Vec<rustc_hir::Ty<'tcx>>>,
+}
+
+impl NeedlessPassByRefMut<'_> {
+ pub fn new(avoid_breaking_exported_api: bool) -> Self {
+ Self {
+ avoid_breaking_exported_api,
+ used_fn_def_ids: FxHashSet::default(),
+ fn_def_ids_to_maybe_unused_mut: FxIndexMap::default(),
+ }
+ }
+}
+
+impl_lint_pass!(NeedlessPassByRefMut<'_> => [NEEDLESS_PASS_BY_REF_MUT]);
+
+fn should_skip<'tcx>(
+ cx: &LateContext<'tcx>,
+ input: rustc_hir::Ty<'tcx>,
+ ty: Ty<'_>,
+ arg: &rustc_hir::Param<'_>,
+) -> bool {
+ // We check if this is a `&mut`. `ref_mutability` returns `None` if it's not a reference.
+ if !matches!(ty.ref_mutability(), Some(Mutability::Mut)) {
+ return true;
+ }
+
+ if is_self(arg) {
+ return true;
+ }
+
+ if let PatKind::Binding(.., name, _) = arg.pat.kind {
+ // If it's a potentially unused variable, we don't check it.
+ if name.name == kw::Underscore || name.as_str().starts_with('_') {
+ return true;
+ }
+ }
+
+ // All spans generated from a proc-macro invocation are the same...
+ is_from_proc_macro(cx, &input)
+}
+
+impl<'tcx> LateLintPass<'tcx> for NeedlessPassByRefMut<'tcx> {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'tcx>,
+ kind: FnKind<'tcx>,
+ decl: &'tcx FnDecl<'tcx>,
+ body: &'tcx Body<'_>,
+ span: Span,
+ fn_def_id: LocalDefId,
+ ) {
+ if span.from_expansion() {
+ return;
+ }
+
+ let hir_id = cx.tcx.hir().local_def_id_to_hir_id(fn_def_id);
+ let is_async = match kind {
+ FnKind::ItemFn(.., header) => {
+ let attrs = cx.tcx.hir().attrs(hir_id);
+ if header.abi != Abi::Rust || requires_exact_signature(attrs) {
+ return;
+ }
+ header.is_async()
+ },
+ FnKind::Method(.., sig) => sig.header.is_async(),
+ FnKind::Closure => return,
+ };
+
+ // Exclude non-inherent impls
+ if let Some(Node::Item(item)) = cx.tcx.hir().find_parent(hir_id) {
+ if matches!(
+ item.kind,
+ ItemKind::Impl(Impl { of_trait: Some(_), .. }) | ItemKind::Trait(..)
+ ) {
+ return;
+ }
+ }
+
+ let fn_sig = cx.tcx.fn_sig(fn_def_id).instantiate_identity();
+ let fn_sig = cx.tcx.liberate_late_bound_regions(fn_def_id.to_def_id(), fn_sig);
+
+ // If there are no `&mut` arguments, there is no need to go any further.
+ let mut it = decl
+ .inputs
+ .iter()
+ .zip(fn_sig.inputs())
+ .zip(body.params)
+ .filter(|((&input, &ty), arg)| !should_skip(cx, input, ty, arg))
+ .peekable();
+ if it.peek().is_none() {
+ return;
+ }
+ // Collect variables used mutably and spans which will need dereferencing from the
+ // function body.
+ let MutablyUsedVariablesCtxt { mutably_used_vars, .. } = {
+ let mut ctx = MutablyUsedVariablesCtxt {
+ mutably_used_vars: HirIdSet::default(),
+ prev_bind: None,
+ prev_move_to_closure: HirIdSet::default(),
+ aliases: HirIdMap::default(),
+ async_closures: FxHashSet::default(),
+ tcx: cx.tcx,
+ };
+ let infcx = cx.tcx.infer_ctxt().build();
+ euv::ExprUseVisitor::new(&mut ctx, &infcx, fn_def_id, cx.param_env, cx.typeck_results()).consume_body(body);
+ if is_async {
+ let mut checked_closures = FxHashSet::default();
+ while !ctx.async_closures.is_empty() {
+ let closures = ctx.async_closures.clone();
+ ctx.async_closures.clear();
+ let hir = cx.tcx.hir();
+ for closure in closures {
+ if !checked_closures.insert(closure) {
+ continue;
+ }
+ ctx.prev_bind = None;
+ ctx.prev_move_to_closure.clear();
+ if let Some(body) = hir
+ .find_by_def_id(closure)
+ .and_then(associated_body)
+ .map(|(_, body_id)| hir.body(body_id))
+ {
+ euv::ExprUseVisitor::new(&mut ctx, &infcx, closure, cx.param_env, cx.typeck_results())
+ .consume_body(body);
+ }
+ }
+ }
+ }
+ ctx
+ };
+ for ((&input, &_), arg) in it {
+ // Only take `&mut` arguments.
+ if let PatKind::Binding(_, canonical_id, ..) = arg.pat.kind
+ && !mutably_used_vars.contains(&canonical_id)
+ {
+ self.fn_def_ids_to_maybe_unused_mut.entry(fn_def_id).or_default().push(input);
+ }
+ }
+ }
+
+ fn check_crate_post(&mut self, cx: &LateContext<'tcx>) {
+ cx.tcx.hir().visit_all_item_likes_in_crate(&mut FnNeedsMutVisitor {
+ cx,
+ used_fn_def_ids: &mut self.used_fn_def_ids,
+ });
+
+ for (fn_def_id, unused) in self
+ .fn_def_ids_to_maybe_unused_mut
+ .iter()
+ .filter(|(def_id, _)| !self.used_fn_def_ids.contains(def_id))
+ {
+ let show_semver_warning =
+ self.avoid_breaking_exported_api && cx.effective_visibilities.is_exported(*fn_def_id);
+
+ let mut is_cfged = None;
+ for input in unused {
+ // If the argument is never used mutably, we emit the warning.
+ let sp = input.span;
+ if let rustc_hir::TyKind::Ref(_, inner_ty) = input.kind {
+ let is_cfged = is_cfged.get_or_insert_with(|| inherits_cfg(cx.tcx, *fn_def_id));
+ span_lint_hir_and_then(
+ cx,
+ NEEDLESS_PASS_BY_REF_MUT,
+ cx.tcx.hir().local_def_id_to_hir_id(*fn_def_id),
+ sp,
+ "this argument is a mutable reference, but not used mutably",
+ |diag| {
+ diag.span_suggestion(
+ sp,
+ "consider changing to".to_string(),
+ format!("&{}", snippet(cx, cx.tcx.hir().span(inner_ty.ty.hir_id), "_"),),
+ Applicability::Unspecified,
+ );
+ if show_semver_warning {
+ diag.warn("changing this function will impact semver compatibility");
+ }
+ if *is_cfged {
+ diag.note("this is cfg-gated and may require further changes");
+ }
+ },
+ );
+ }
+ }
+ }
+ }
+}
+
+struct MutablyUsedVariablesCtxt<'tcx> {
+ mutably_used_vars: HirIdSet,
+ prev_bind: Option<HirId>,
+ prev_move_to_closure: HirIdSet,
+ aliases: HirIdMap<HirId>,
+ async_closures: FxHashSet<LocalDefId>,
+ tcx: TyCtxt<'tcx>,
+}
+
+impl<'tcx> MutablyUsedVariablesCtxt<'tcx> {
+ fn add_mutably_used_var(&mut self, mut used_id: HirId) {
+ while let Some(id) = self.aliases.get(&used_id) {
+ self.mutably_used_vars.insert(used_id);
+ used_id = *id;
+ }
+ self.mutably_used_vars.insert(used_id);
+ }
+
+ fn would_be_alias_cycle(&self, alias: HirId, mut target: HirId) -> bool {
+ while let Some(id) = self.aliases.get(&target) {
+ if *id == alias {
+ return true;
+ }
+ target = *id;
+ }
+ false
+ }
+
+ fn add_alias(&mut self, alias: HirId, target: HirId) {
+ // This is to prevent alias loops.
+ if alias == target || self.would_be_alias_cycle(alias, target) {
+ return;
+ }
+ self.aliases.insert(alias, target);
+ }
+}
+
+impl<'tcx> euv::Delegate<'tcx> for MutablyUsedVariablesCtxt<'tcx> {
+ fn consume(&mut self, cmt: &euv::PlaceWithHirId<'tcx>, _id: HirId) {
+ if let euv::Place {
+ base:
+ euv::PlaceBase::Local(vid)
+ | euv::PlaceBase::Upvar(UpvarId {
+ var_path: UpvarPath { hir_id: vid },
+ ..
+ }),
+ base_ty,
+ ..
+ } = &cmt.place
+ {
+ if let Some(bind_id) = self.prev_bind.take() {
+ if bind_id != *vid {
+ self.add_alias(bind_id, *vid);
+ }
+ } else if !self.prev_move_to_closure.contains(vid)
+ && matches!(base_ty.ref_mutability(), Some(Mutability::Mut))
+ {
+ self.add_mutably_used_var(*vid);
+ }
+ self.prev_bind = None;
+ self.prev_move_to_closure.remove(vid);
+ }
+ }
+
+ fn borrow(&mut self, cmt: &euv::PlaceWithHirId<'tcx>, _id: HirId, borrow: ty::BorrowKind) {
+ self.prev_bind = None;
+ if let euv::Place {
+ base: euv::PlaceBase::Local(vid),
+ base_ty,
+ ..
+ } = &cmt.place
+ {
+ // If this is a mutable borrow, it was obviously used mutably so we add it. However
+ // for `UniqueImmBorrow`, it's interesting because if you do: `array[0] = value` inside
+ // a closure, it'll return this variant whereas if you have just an index access, it'll
+ // return `ImmBorrow`. So if there is "Unique" and it's a mutable reference, we add it
+ // to the mutably used variables set.
+ if borrow == ty::BorrowKind::MutBorrow
+ || (borrow == ty::BorrowKind::UniqueImmBorrow && base_ty.ref_mutability() == Some(Mutability::Mut))
+ {
+ self.add_mutably_used_var(*vid);
+ }
+ } else if borrow == ty::ImmBorrow {
+ // If there is an `async block`, it'll contain a call to a closure which we need to
+ // go into to ensure all "mutate" checks are found.
+ if let Node::Expr(Expr {
+ kind:
+ ExprKind::Call(
+ _,
+ [
+ Expr {
+ kind: ExprKind::Closure(Closure { def_id, .. }),
+ ..
+ },
+ ],
+ ),
+ ..
+ }) = self.tcx.hir().get(cmt.hir_id)
+ {
+ self.async_closures.insert(*def_id);
+ }
+ }
+ }
+
+ fn mutate(&mut self, cmt: &euv::PlaceWithHirId<'tcx>, _id: HirId) {
+ self.prev_bind = None;
+ if let euv::Place {
+ projections,
+ base:
+ euv::PlaceBase::Local(vid)
+ | euv::PlaceBase::Upvar(UpvarId {
+ var_path: UpvarPath { hir_id: vid },
+ ..
+ }),
+ ..
+ } = &cmt.place
+ {
+ if !projections.is_empty() {
+ self.add_mutably_used_var(*vid);
+ }
+ }
+ }
+
+ fn copy(&mut self, _cmt: &euv::PlaceWithHirId<'tcx>, _id: HirId) {
+ self.prev_bind = None;
+ }
+
+ fn fake_read(
+ &mut self,
+ cmt: &rustc_hir_typeck::expr_use_visitor::PlaceWithHirId<'tcx>,
+ cause: FakeReadCause,
+ _id: HirId,
+ ) {
+ if let euv::Place {
+ base:
+ euv::PlaceBase::Upvar(UpvarId {
+ var_path: UpvarPath { hir_id: vid },
+ ..
+ }),
+ ..
+ } = &cmt.place
+ {
+ if let FakeReadCause::ForLet(Some(inner)) = cause {
+ // Seems like we are inside an async function. We need to store the closure `DefId`
+ // to go through it afterwards.
+ self.async_closures.insert(inner);
+ self.add_alias(cmt.hir_id, *vid);
+ self.prev_move_to_closure.insert(*vid);
+ self.prev_bind = None;
+ }
+ }
+ }
+
+ fn bind(&mut self, _cmt: &euv::PlaceWithHirId<'tcx>, id: HirId) {
+ self.prev_bind = Some(id);
+ }
+}
+
+/// A final pass to check for paths referencing this function that require the argument to be
+/// `&mut`, basically if the function is ever used as a `fn`-like argument.
+struct FnNeedsMutVisitor<'a, 'tcx> {
+ cx: &'a LateContext<'tcx>,
+ used_fn_def_ids: &'a mut FxHashSet<LocalDefId>,
+}
+
+impl<'tcx> Visitor<'tcx> for FnNeedsMutVisitor<'_, 'tcx> {
+ type NestedFilter = OnlyBodies;
+
+ fn nested_visit_map(&mut self) -> Self::Map {
+ self.cx.tcx.hir()
+ }
+
+ fn visit_qpath(&mut self, qpath: &'tcx QPath<'tcx>, hir_id: HirId, _: Span) {
+ walk_qpath(self, qpath, hir_id);
+
+ let Self { cx, used_fn_def_ids } = self;
+
+ // #11182; do not lint if mutability is required elsewhere
+ if let Node::Expr(expr) = cx.tcx.hir().get(hir_id)
+ && let Some(parent) = get_parent_node(cx.tcx, expr.hir_id)
+ && let ty::FnDef(def_id, _) = cx.tcx.typeck(cx.tcx.hir().enclosing_body_owner(hir_id)).expr_ty(expr).kind()
+ && let Some(def_id) = def_id.as_local()
+ {
+ if let Node::Expr(e) = parent
+ && let ExprKind::Call(call, _) = e.kind
+ && call.hir_id == expr.hir_id
+ {
+ return;
+ }
+
+ // We don't need to check each argument individually as you cannot coerce a function
+ // taking `&mut` -> `&`, for some reason, so if we've gotten this far we know it's
+ // passed as a `fn`-like argument (or is unified) and should ignore every "unused"
+ // argument entirely
+ used_fn_def_ids.insert(def_id);
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs
index f11d5773d..5ee26966f 100644
--- a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs
@@ -2,7 +2,7 @@ use clippy_utils::diagnostics::{multispan_sugg, span_lint_and_then};
use clippy_utils::ptr::get_spans;
use clippy_utils::source::{snippet, snippet_opt};
use clippy_utils::ty::{
- implements_trait, implements_trait_with_env, is_copy, is_type_diagnostic_item, is_type_lang_item,
+ implements_trait, implements_trait_with_env_from_iter, is_copy, is_type_diagnostic_item, is_type_lang_item,
};
use clippy_utils::{get_trait_def_id, is_self, paths};
use if_chain::if_chain;
@@ -10,14 +10,14 @@ use rustc_ast::ast::Attribute;
use rustc_errors::{Applicability, Diagnostic};
use rustc_hir::intravisit::FnKind;
use rustc_hir::{
- BindingAnnotation, Body, FnDecl, GenericArg, HirId, Impl, ItemKind, Mutability, Node, PatKind, QPath, TyKind,
+ BindingAnnotation, Body, FnDecl, GenericArg, HirId, HirIdSet, Impl, ItemKind, LangItem, Mutability, Node, PatKind,
+ QPath, TyKind,
};
-use rustc_hir::{HirIdSet, LangItem};
use rustc_hir_typeck::expr_use_visitor as euv;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir::FakeReadCause;
-use rustc_middle::ty::{self, TypeVisitableExt, Ty};
+use rustc_middle::ty::{self, Ty, TypeVisitableExt};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::def_id::LocalDefId;
use rustc_span::symbol::kw;
@@ -140,7 +140,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue {
ctx
};
- let fn_sig = cx.tcx.fn_sig(fn_def_id).subst_identity();
+ let fn_sig = cx.tcx.fn_sig(fn_def_id).instantiate_identity();
let fn_sig = cx.tcx.liberate_late_bound_regions(fn_def_id.to_def_id(), fn_sig);
for (idx, ((input, &ty), arg)) in decl.inputs.iter().zip(fn_sig.inputs()).zip(body.params).enumerate() {
@@ -168,9 +168,9 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue {
(
preds.iter().any(|t| cx.tcx.is_diagnostic_item(sym::Borrow, t.def_id())),
!preds.is_empty() && {
- let ty_empty_region = Ty::new_imm_ref(cx.tcx,cx.tcx.lifetimes.re_erased, ty);
+ let ty_empty_region = Ty::new_imm_ref(cx.tcx, cx.tcx.lifetimes.re_erased, ty);
preds.iter().all(|t| {
- let ty_params = t.trait_ref.substs.iter().skip(1).collect::<Vec<_>>();
+ let ty_params = t.trait_ref.args.iter().skip(1).collect::<Vec<_>>();
implements_trait(cx, ty_empty_region, t.def_id(), &ty_params)
})
},
@@ -182,7 +182,13 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue {
if !ty.is_mutable_ptr();
if !is_copy(cx, ty);
if ty.is_sized(cx.tcx, cx.param_env);
- if !allowed_traits.iter().any(|&t| implements_trait_with_env(cx.tcx, cx.param_env, ty, t, [None]));
+ if !allowed_traits.iter().any(|&t| implements_trait_with_env_from_iter(
+ cx.tcx,
+ cx.param_env,
+ ty,
+ t,
+ [Option::<ty::GenericArg<'tcx>>::None],
+ ));
if !implements_borrow_trait;
if !all_borrowable_trait;
@@ -289,7 +295,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue {
}
/// Functions marked with these attributes must have the exact signature.
-fn requires_exact_signature(attrs: &[Attribute]) -> bool {
+pub(crate) fn requires_exact_signature(attrs: &[Attribute]) -> bool {
attrs.iter().any(|attr| {
[sym::proc_macro, sym::proc_macro_attribute, sym::proc_macro_derive]
.iter()
diff --git a/src/tools/clippy/clippy_lints/src/needless_question_mark.rs b/src/tools/clippy/clippy_lints/src/needless_question_mark.rs
index e2a7ba02a..7b0f7eaf1 100644
--- a/src/tools/clippy/clippy_lints/src/needless_question_mark.rs
+++ b/src/tools/clippy/clippy_lints/src/needless_question_mark.rs
@@ -122,7 +122,7 @@ fn check(cx: &LateContext<'_>, expr: &Expr<'_>) {
} else {
return;
};
- if let ExprKind::Match(inner_expr_with_q, _, MatchSource::TryDesugar) = &arg.kind;
+ if let ExprKind::Match(inner_expr_with_q, _, MatchSource::TryDesugar(_)) = &arg.kind;
if let ExprKind::Call(called, [inner_expr]) = &inner_expr_with_q.kind;
if let ExprKind::Path(QPath::LangItem(LangItem::TryTraitBranch, ..)) = &called.kind;
if expr.span.ctxt() == inner_expr.span.ctxt();
diff --git a/src/tools/clippy/clippy_lints/src/new_without_default.rs b/src/tools/clippy/clippy_lints/src/new_without_default.rs
index 653b1a8a0..cf7cd671d 100644
--- a/src/tools/clippy/clippy_lints/src/new_without_default.rs
+++ b/src/tools/clippy/clippy_lints/src/new_without_default.rs
@@ -98,14 +98,14 @@ impl<'tcx> LateLintPass<'tcx> for NewWithoutDefault {
if name == sym::new;
if cx.effective_visibilities.is_reachable(impl_item.owner_id.def_id);
let self_def_id = cx.tcx.hir().get_parent_item(id.into());
- let self_ty = cx.tcx.type_of(self_def_id).subst_identity();
+ let self_ty = cx.tcx.type_of(self_def_id).instantiate_identity();
if self_ty == return_ty(cx, id);
if let Some(default_trait_id) = cx.tcx.get_diagnostic_item(sym::Default);
then {
if self.impling_types.is_none() {
let mut impls = HirIdSet::default();
cx.tcx.for_each_impl(default_trait_id, |d| {
- let ty = cx.tcx.type_of(d).subst_identity();
+ let ty = cx.tcx.type_of(d).instantiate_identity();
if let Some(ty_def) = ty.ty_adt_def() {
if let Some(local_def_id) = ty_def.did().as_local() {
impls.insert(cx.tcx.hir().local_def_id_to_hir_id(local_def_id));
@@ -119,7 +119,7 @@ impl<'tcx> LateLintPass<'tcx> for NewWithoutDefault {
// generics
if_chain! {
if let Some(ref impling_types) = self.impling_types;
- let self_def = cx.tcx.type_of(self_def_id).subst_identity();
+ let self_def = cx.tcx.type_of(self_def_id).instantiate_identity();
if let Some(self_def) = self_def.ty_adt_def();
if let Some(self_local_did) = self_def.did().as_local();
let self_id = cx.tcx.hir().local_def_id_to_hir_id(self_local_did);
diff --git a/src/tools/clippy/clippy_lints/src/no_effect.rs b/src/tools/clippy/clippy_lints/src/no_effect.rs
index a4c7da7e4..5f2a324b0 100644
--- a/src/tools/clippy/clippy_lints/src/no_effect.rs
+++ b/src/tools/clippy/clippy_lints/src/no_effect.rs
@@ -1,8 +1,7 @@
use clippy_utils::diagnostics::{span_lint_hir, span_lint_hir_and_then};
-use clippy_utils::peel_blocks;
use clippy_utils::source::snippet_opt;
use clippy_utils::ty::has_drop;
-use clippy_utils::{get_parent_node, is_lint_allowed};
+use clippy_utils::{get_parent_node, is_lint_allowed, peel_blocks};
use rustc_errors::Applicability;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::{
@@ -161,7 +160,7 @@ fn has_no_effect(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
match peel_blocks(expr).kind {
ExprKind::Lit(..) | ExprKind::Closure { .. } => true,
ExprKind::Path(..) => !has_drop(cx, cx.typeck_results().expr_ty(expr)),
- ExprKind::Index(a, b) | ExprKind::Binary(_, a, b) => has_no_effect(cx, a) && has_no_effect(cx, b),
+ ExprKind::Index(a, b, _) | ExprKind::Binary(_, a, b) => has_no_effect(cx, a) && has_no_effect(cx, b),
ExprKind::Array(v) | ExprKind::Tup(v) => v.iter().all(|val| has_no_effect(cx, val)),
ExprKind::Repeat(inner, _)
| ExprKind::Cast(inner, _)
@@ -264,7 +263,7 @@ fn reduce_expression<'a>(cx: &LateContext<'_>, expr: &'a Expr<'a>) -> Option<Vec
return None;
}
match expr.kind {
- ExprKind::Index(a, b) => Some(vec![a, b]),
+ ExprKind::Index(a, b, _) => Some(vec![a, b]),
ExprKind::Binary(ref binop, a, b) if binop.node != BinOpKind::And && binop.node != BinOpKind::Or => {
Some(vec![a, b])
},
diff --git a/src/tools/clippy/clippy_lints/src/non_copy_const.rs b/src/tools/clippy/clippy_lints/src/non_copy_const.rs
index 75f1e9527..243192385 100644
--- a/src/tools/clippy/clippy_lints/src/non_copy_const.rs
+++ b/src/tools/clippy/clippy_lints/src/non_copy_const.rs
@@ -15,14 +15,12 @@ use rustc_hir::{
};
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_lint::{LateContext, LateLintPass, Lint};
-use rustc_middle::mir::interpret::ErrorHandled;
+use rustc_middle::mir::interpret::{ErrorHandled, EvalToValTreeResult, GlobalId};
use rustc_middle::ty::adjustment::Adjust;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::{sym, InnerSpan, Span};
use rustc_target::abi::VariantIdx;
-use rustc_middle::mir::interpret::EvalToValTreeResult;
-use rustc_middle::mir::interpret::GlobalId;
// FIXME: this is a correctness problem but there's no suitable
// warn-by-default category.
@@ -154,24 +152,32 @@ fn is_value_unfrozen_raw<'tcx>(
// As of 2022-09-08 miri doesn't track which union field is active so there's no safe way to check the
// contained value.
ty::Adt(def, ..) if def.is_union() => false,
- ty::Array(ty, _) => {
- val.unwrap_branch().iter().any(|field| inner(cx, *field, ty))
- },
+ ty::Array(ty, _) => val.unwrap_branch().iter().any(|field| inner(cx, *field, ty)),
ty::Adt(def, _) if def.is_union() => false,
- ty::Adt(def, substs) if def.is_enum() => {
+ ty::Adt(def, args) if def.is_enum() => {
let (&variant_index, fields) = val.unwrap_branch().split_first().unwrap();
- let variant_index =
- VariantIdx::from_u32(variant_index.unwrap_leaf().try_to_u32().ok().unwrap());
- fields.iter().copied().zip(
- def.variants()[variant_index]
- .fields
- .iter()
- .map(|field| field.ty(cx.tcx, substs))).any(|(field, ty)| inner(cx, field, ty))
- }
- ty::Adt(def, substs) => {
- val.unwrap_branch().iter().zip(def.non_enum_variant().fields.iter().map(|field| field.ty(cx.tcx, substs))).any(|(field, ty)| inner(cx, *field, ty))
- }
- ty::Tuple(tys) => val.unwrap_branch().iter().zip(tys).any(|(field, ty)| inner(cx, *field, ty)),
+ let variant_index = VariantIdx::from_u32(variant_index.unwrap_leaf().try_to_u32().ok().unwrap());
+ fields
+ .iter()
+ .copied()
+ .zip(
+ def.variants()[variant_index]
+ .fields
+ .iter()
+ .map(|field| field.ty(cx.tcx, args)),
+ )
+ .any(|(field, ty)| inner(cx, field, ty))
+ },
+ ty::Adt(def, args) => val
+ .unwrap_branch()
+ .iter()
+ .zip(def.non_enum_variant().fields.iter().map(|field| field.ty(cx.tcx, args)))
+ .any(|(field, ty)| inner(cx, *field, ty)),
+ ty::Tuple(tys) => val
+ .unwrap_branch()
+ .iter()
+ .zip(tys)
+ .any(|(field, ty)| inner(cx, *field, ty)),
_ => false,
}
}
@@ -206,33 +212,38 @@ fn is_value_unfrozen_raw<'tcx>(
fn is_value_unfrozen_poly<'tcx>(cx: &LateContext<'tcx>, body_id: BodyId, ty: Ty<'tcx>) -> bool {
let def_id = body_id.hir_id.owner.to_def_id();
- let substs = ty::InternalSubsts::identity_for_item(cx.tcx, def_id);
- let instance = ty::Instance::new(def_id, substs);
- let cid = rustc_middle::mir::interpret::GlobalId { instance, promoted: None };
+ let args = ty::GenericArgs::identity_for_item(cx.tcx, def_id);
+ let instance = ty::Instance::new(def_id, args);
+ let cid = rustc_middle::mir::interpret::GlobalId {
+ instance,
+ promoted: None,
+ };
let param_env = cx.tcx.param_env(def_id).with_reveal_all_normalized(cx.tcx);
let result = cx.tcx.const_eval_global_id_for_typeck(param_env, cid, None);
is_value_unfrozen_raw(cx, result, ty)
}
fn is_value_unfrozen_expr<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId, def_id: DefId, ty: Ty<'tcx>) -> bool {
- let substs = cx.typeck_results().node_substs(hir_id);
+ let args = cx.typeck_results().node_args(hir_id);
- let result = const_eval_resolve(cx.tcx, cx.param_env, ty::UnevaluatedConst::new(def_id, substs), None);
+ let result = const_eval_resolve(cx.tcx, cx.param_env, ty::UnevaluatedConst::new(def_id, args), None);
is_value_unfrozen_raw(cx, result, ty)
}
-
pub fn const_eval_resolve<'tcx>(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
ct: ty::UnevaluatedConst<'tcx>,
span: Option<Span>,
) -> EvalToValTreeResult<'tcx> {
- match ty::Instance::resolve(tcx, param_env, ct.def, ct.substs) {
+ match ty::Instance::resolve(tcx, param_env, ct.def, ct.args) {
Ok(Some(instance)) => {
- let cid = GlobalId { instance, promoted: None };
+ let cid = GlobalId {
+ instance,
+ promoted: None,
+ };
tcx.const_eval_global_id_for_typeck(param_env, cid, span)
- }
+ },
Ok(None) => Err(ErrorHandled::TooGeneric),
Err(err) => Err(ErrorHandled::Reported(err.into())),
}
@@ -286,7 +297,7 @@ declare_lint_pass!(NonCopyConst => [DECLARE_INTERIOR_MUTABLE_CONST, BORROW_INTER
impl<'tcx> LateLintPass<'tcx> for NonCopyConst {
fn check_item(&mut self, cx: &LateContext<'tcx>, it: &'tcx Item<'_>) {
- if let ItemKind::Const(hir_ty, body_id) = it.kind {
+ if let ItemKind::Const(hir_ty, _generics, body_id) = it.kind {
let ty = hir_ty_to_ty(cx.tcx, hir_ty);
if !ignored_macro(cx, it) && is_unfrozen(cx, ty) && is_value_unfrozen_poly(cx, body_id, ty) {
lint(cx, Source::Item { item: it.span });
@@ -347,7 +358,7 @@ impl<'tcx> LateLintPass<'tcx> for NonCopyConst {
// and, in that case, the definition is *not* generic.
cx.tcx.normalize_erasing_regions(
cx.tcx.param_env(of_trait_def_id),
- cx.tcx.type_of(of_assoc_item).subst_identity(),
+ cx.tcx.type_of(of_assoc_item).instantiate_identity(),
),
))
.is_err();
@@ -392,7 +403,7 @@ impl<'tcx> LateLintPass<'tcx> for NonCopyConst {
// Make sure it is a const item.
let Res::Def(DefKind::Const | DefKind::AssocConst, item_def_id) = cx.qpath_res(qpath, expr.hir_id) else {
- return
+ return;
};
// Climb up to resolve any field access and explicit referencing.
@@ -427,7 +438,7 @@ impl<'tcx> LateLintPass<'tcx> for NonCopyConst {
dereferenced_expr = parent_expr;
},
- ExprKind::Index(e, _) if ptr::eq(&**e, cur_expr) => {
+ ExprKind::Index(e, _, _) if ptr::eq(&**e, cur_expr) => {
// `e[i]` => desugared to `*Index::index(&e, i)`,
// meaning `e` must be referenced.
// no need to go further up since a method call is involved now.
diff --git a/src/tools/clippy/clippy_lints/src/non_expressive_names.rs b/src/tools/clippy/clippy_lints/src/non_expressive_names.rs
index 9f6917c14..d562047cb 100644
--- a/src/tools/clippy/clippy_lints/src/non_expressive_names.rs
+++ b/src/tools/clippy/clippy_lints/src/non_expressive_names.rs
@@ -91,7 +91,7 @@ struct ExistingName {
struct SimilarNamesLocalVisitor<'a, 'tcx> {
names: Vec<ExistingName>,
cx: &'a EarlyContext<'tcx>,
- lint: &'a NonExpressiveNames,
+ lint: NonExpressiveNames,
/// A stack of scopes containing the single-character bindings in each scope.
single_char_names: Vec<Vec<Ident>>,
@@ -365,7 +365,7 @@ impl EarlyLintPass for NonExpressiveNames {
..
}) = item.kind
{
- do_check(self, cx, &item.attrs, &sig.decl, blk);
+ do_check(*self, cx, &item.attrs, &sig.decl, blk);
}
}
@@ -380,12 +380,12 @@ impl EarlyLintPass for NonExpressiveNames {
..
}) = item.kind
{
- do_check(self, cx, &item.attrs, &sig.decl, blk);
+ do_check(*self, cx, &item.attrs, &sig.decl, blk);
}
}
}
-fn do_check(lint: &mut NonExpressiveNames, cx: &EarlyContext<'_>, attrs: &[Attribute], decl: &FnDecl, blk: &Block) {
+fn do_check(lint: NonExpressiveNames, cx: &EarlyContext<'_>, attrs: &[Attribute], decl: &FnDecl, blk: &Block) {
if !attrs.iter().any(|attr| attr.has_name(sym::test)) {
let mut visitor = SimilarNamesLocalVisitor {
names: Vec::new(),
diff --git a/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs b/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs
index 7eaa7db78..c5e777c20 100644
--- a/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs
+++ b/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs
@@ -7,7 +7,7 @@ use rustc_hir::def_id::DefId;
use rustc_hir::{FieldDef, Item, ItemKind, Node};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
-use rustc_middle::ty::{self, subst::GenericArgKind, Ty};
+use rustc_middle::ty::{self, GenericArgKind, Ty};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::sym;
@@ -90,8 +90,8 @@ impl<'tcx> LateLintPass<'tcx> for NonSendFieldInSendTy {
if send_trait == trait_id;
if hir_impl.polarity == ImplPolarity::Positive;
if let Some(ty_trait_ref) = cx.tcx.impl_trait_ref(item.owner_id);
- if let self_ty = ty_trait_ref.subst_identity().self_ty();
- if let ty::Adt(adt_def, impl_trait_substs) = self_ty.kind();
+ if let self_ty = ty_trait_ref.instantiate_identity().self_ty();
+ if let ty::Adt(adt_def, impl_trait_args) = self_ty.kind();
then {
let mut non_send_fields = Vec::new();
@@ -104,7 +104,7 @@ impl<'tcx> LateLintPass<'tcx> for NonSendFieldInSendTy {
.as_local()
.map(|local_def_id| hir_map.local_def_id_to_hir_id(local_def_id));
if !is_lint_allowed(cx, NON_SEND_FIELDS_IN_SEND_TY, field_hir_id);
- if let field_ty = field.ty(cx.tcx, impl_trait_substs);
+ if let field_ty = field.ty(cx.tcx, impl_trait_args);
if !ty_allowed_in_send(cx, field_ty, send_trait);
if let Node::Field(field_def) = hir_map.get(field_hir_id);
then {
@@ -206,10 +206,10 @@ fn ty_allowed_with_raw_pointer_heuristic<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'t
.iter()
.all(|ty| ty_allowed_with_raw_pointer_heuristic(cx, ty, send_trait)),
ty::Array(ty, _) | ty::Slice(ty) => ty_allowed_with_raw_pointer_heuristic(cx, *ty, send_trait),
- ty::Adt(_, substs) => {
+ ty::Adt(_, args) => {
if contains_pointer_like(cx, ty) {
// descends only if ADT contains any raw pointers
- substs.iter().all(|generic_arg| match generic_arg.unpack() {
+ args.iter().all(|generic_arg| match generic_arg.unpack() {
GenericArgKind::Type(ty) => ty_allowed_with_raw_pointer_heuristic(cx, ty, send_trait),
// Lifetimes and const generics are not solid part of ADT and ignored
GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => true,
@@ -224,7 +224,7 @@ fn ty_allowed_with_raw_pointer_heuristic<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'t
}
}
-/// Checks if the type contains any pointer-like types in substs (including nested ones)
+/// Checks if the type contains any pointer-like types in args (including nested ones)
fn contains_pointer_like<'tcx>(cx: &LateContext<'tcx>, target_ty: Ty<'tcx>) -> bool {
for ty_node in target_ty.walk() {
if let GenericArgKind::Type(inner_ty) = ty_node.unpack() {
diff --git a/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs b/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs
index 2d79a5c90..bd194b935 100644
--- a/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs
+++ b/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs
@@ -1,7 +1,5 @@
-use std::{
- fmt,
- hash::{Hash, Hasher},
-};
+use std::fmt;
+use std::hash::{Hash, Hasher};
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_opt;
diff --git a/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs b/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs
index 8b77a5c99..3dc652f9d 100644
--- a/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs
+++ b/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs
@@ -7,8 +7,7 @@ use rustc_hir::def_id::DefId;
use rustc_hir::hir_id::HirIdMap;
use rustc_hir::{Body, Expr, ExprKind, HirId, ImplItem, ImplItemKind, Node, PatKind, TraitItem, TraitItemKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_middle::ty::subst::{EarlyBinder, GenericArgKind, SubstsRef};
-use rustc_middle::ty::{self, ConstKind};
+use rustc_middle::ty::{self, ConstKind, EarlyBinder, GenericArgKind, GenericArgsRef};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::symbol::{kw, Ident};
use rustc_span::Span;
@@ -90,7 +89,7 @@ impl_lint_pass!(OnlyUsedInRecursion => [ONLY_USED_IN_RECURSION]);
enum FnKind {
Fn,
TraitFn,
- // This is a hack. Ideally we would store a `SubstsRef<'tcx>` type here, but a lint pass must be `'static`.
+ // This is a hack. Ideally we would store a `GenericArgsRef<'tcx>` type here, but a lint pass must be `'static`.
// Substitutions are, however, interned. This allows us to store the pointer as a `usize` when comparing for
// equality.
ImplTraitFn(usize),
@@ -244,12 +243,12 @@ impl<'tcx> LateLintPass<'tcx> for OnlyUsedInRecursion {
})) => {
#[allow(trivial_casts)]
if let Some(Node::Item(item)) = get_parent_node(cx.tcx, owner_id.into())
- && let Some(trait_ref) = cx.tcx.impl_trait_ref(item.owner_id).map(EarlyBinder::subst_identity)
+ && let Some(trait_ref) = cx.tcx.impl_trait_ref(item.owner_id).map(EarlyBinder::instantiate_identity)
&& let Some(trait_item_id) = cx.tcx.associated_item(owner_id).trait_item_def_id
{
(
trait_item_id,
- FnKind::ImplTraitFn(cx.tcx.erase_regions(trait_ref.substs) as *const _ as usize),
+ FnKind::ImplTraitFn(cx.tcx.erase_regions(trait_ref.args) as *const _ as usize),
usize::from(sig.decl.implicit_self.has_implicit_self()),
)
} else {
@@ -289,7 +288,7 @@ impl<'tcx> LateLintPass<'tcx> for OnlyUsedInRecursion {
ExprKind::Call(callee, args)
if path_def_id(cx, callee).map_or(false, |id| {
id == param.fn_id
- && has_matching_substs(param.fn_kind, typeck.node_substs(callee.hir_id))
+ && has_matching_args(param.fn_kind, typeck.node_args(callee.hir_id))
}) =>
{
if let Some(idx) = args.iter().position(|arg| arg.hir_id == child_id) {
@@ -300,7 +299,7 @@ impl<'tcx> LateLintPass<'tcx> for OnlyUsedInRecursion {
ExprKind::MethodCall(_, receiver, args, _)
if typeck.type_dependent_def_id(parent.hir_id).map_or(false, |id| {
id == param.fn_id
- && has_matching_substs(param.fn_kind, typeck.node_substs(parent.hir_id))
+ && has_matching_args(param.fn_kind, typeck.node_args(parent.hir_id))
}) =>
{
if let Some(idx) = iter::once(receiver).chain(args).position(|arg| arg.hir_id == child_id) {
@@ -381,15 +380,15 @@ impl<'tcx> LateLintPass<'tcx> for OnlyUsedInRecursion {
}
}
-fn has_matching_substs(kind: FnKind, substs: SubstsRef<'_>) -> bool {
+fn has_matching_args(kind: FnKind, args: GenericArgsRef<'_>) -> bool {
match kind {
FnKind::Fn => true,
- FnKind::TraitFn => substs.iter().enumerate().all(|(idx, subst)| match subst.unpack() {
+ FnKind::TraitFn => args.iter().enumerate().all(|(idx, subst)| match subst.unpack() {
GenericArgKind::Lifetime(_) => true,
GenericArgKind::Type(ty) => matches!(*ty.kind(), ty::Param(ty) if ty.index as usize == idx),
GenericArgKind::Const(c) => matches!(c.kind(), ConstKind::Param(c) if c.index as usize == idx),
}),
#[allow(trivial_casts)]
- FnKind::ImplTraitFn(expected_substs) => substs as *const _ as usize == expected_substs,
+ FnKind::ImplTraitFn(expected_args) => args as *const _ as usize == expected_args,
}
}
diff --git a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs
index 5c240276b..f9108145c 100644
--- a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs
@@ -1,26 +1,20 @@
use super::ARITHMETIC_SIDE_EFFECTS;
-use clippy_utils::is_from_proc_macro;
-use clippy_utils::{
- consts::{constant, constant_simple, Constant},
- diagnostics::span_lint,
- is_lint_allowed, peel_hir_expr_refs, peel_hir_expr_unary,
-};
-use rustc_ast as ast;
+use clippy_utils::consts::{constant, constant_simple, Constant};
+use clippy_utils::diagnostics::span_lint;
+use clippy_utils::{expr_or_init, is_from_proc_macro, is_lint_allowed, peel_hir_expr_refs, peel_hir_expr_unary};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::Ty;
use rustc_session::impl_lint_pass;
-use rustc_span::{
- source_map::{Span, Spanned},
- Symbol,
-};
+use rustc_span::source_map::{Span, Spanned};
+use rustc_span::Symbol;
+use {rustc_ast as ast, rustc_hir as hir};
const HARD_CODED_ALLOWED_BINARY: &[[&str; 2]] = &[
["f32", "f32"],
["f64", "f64"],
- ["std::num::Saturating", "std::num::Saturating"],
- ["std::num::Wrapping", "std::num::Wrapping"],
+ ["std::num::Saturating", "*"],
+ ["std::num::Wrapping", "*"],
["std::string::String", "str"],
];
const HARD_CODED_ALLOWED_UNARY: &[&str] = &["f32", "f64", "std::num::Saturating", "std::num::Wrapping"];
@@ -144,8 +138,10 @@ impl ArithmeticSideEffects {
) {
return;
};
- let (actual_lhs, lhs_ref_counter) = peel_hir_expr_refs(lhs);
- let (actual_rhs, rhs_ref_counter) = peel_hir_expr_refs(rhs);
+ let (mut actual_lhs, lhs_ref_counter) = peel_hir_expr_refs(lhs);
+ let (mut actual_rhs, rhs_ref_counter) = peel_hir_expr_refs(rhs);
+ actual_lhs = expr_or_init(cx, actual_lhs);
+ actual_rhs = expr_or_init(cx, actual_rhs);
let lhs_ty = cx.typeck_results().expr_ty(actual_lhs).peel_refs();
let rhs_ty = cx.typeck_results().expr_ty(actual_rhs).peel_refs();
if self.has_allowed_binary(lhs_ty, rhs_ty) {
@@ -200,7 +196,9 @@ impl ArithmeticSideEffects {
ps: &hir::PathSegment<'tcx>,
receiver: &hir::Expr<'tcx>,
) {
- let Some(arg) = args.first() else { return; };
+ let Some(arg) = args.first() else {
+ return;
+ };
if constant_simple(cx, cx.typeck_results(), receiver).is_some() {
return;
}
@@ -225,7 +223,9 @@ impl ArithmeticSideEffects {
un_expr: &hir::Expr<'tcx>,
un_op: hir::UnOp,
) {
- let hir::UnOp::Neg = un_op else { return; };
+ let hir::UnOp::Neg = un_op else {
+ return;
+ };
if constant(cx, cx.typeck_results(), un_expr).is_some() {
return;
}
diff --git a/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs b/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs
index 9bbf385fb..c4572a09d 100644
--- a/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs
@@ -1,9 +1,8 @@
-use clippy_utils::binop_traits;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet_opt;
use clippy_utils::ty::implements_trait;
use clippy_utils::visitors::for_each_expr;
-use clippy_utils::{eq_expr_value, trait_ref_of_method};
+use clippy_utils::{binop_traits, eq_expr_value, trait_ref_of_method};
use core::ops::ControlFlow;
use if_chain::if_chain;
use rustc_errors::Applicability;
diff --git a/src/tools/clippy/clippy_lints/src/operators/bit_mask.rs b/src/tools/clippy/clippy_lints/src/operators/bit_mask.rs
index 1fddf0f50..c146f3ae9 100644
--- a/src/tools/clippy/clippy_lints/src/operators/bit_mask.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/bit_mask.rs
@@ -40,9 +40,9 @@ fn check_compare(cx: &LateContext<'_>, bit_op: &Expr<'_>, cmp_op: BinOpKind, cmp
if op.node != BinOpKind::BitAnd && op.node != BinOpKind::BitOr {
return;
}
- fetch_int_literal(cx, right)
- .or_else(|| fetch_int_literal(cx, left))
- .map_or((), |mask| check_bit_mask(cx, op.node, cmp_op, mask, cmp_value, span));
+ if let Some(mask) = fetch_int_literal(cx, right).or_else(|| fetch_int_literal(cx, left)) {
+ check_bit_mask(cx, op.node, cmp_op, mask, cmp_value, span);
+ }
}
}
diff --git a/src/tools/clippy/clippy_lints/src/operators/const_comparisons.rs b/src/tools/clippy/clippy_lints/src/operators/const_comparisons.rs
new file mode 100644
index 000000000..abe8df195
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/operators/const_comparisons.rs
@@ -0,0 +1,207 @@
+#![allow(clippy::match_same_arms)]
+
+use std::cmp::Ordering;
+
+use clippy_utils::consts::{constant, Constant};
+use if_chain::if_chain;
+use rustc_hir::{BinOpKind, Expr, ExprKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::layout::HasTyCtxt;
+use rustc_middle::ty::{Ty, TypeckResults};
+use rustc_span::source_map::{Span, Spanned};
+
+use clippy_utils::diagnostics::span_lint_and_note;
+use clippy_utils::source::snippet;
+use clippy_utils::SpanlessEq;
+
+use super::{IMPOSSIBLE_COMPARISONS, REDUNDANT_COMPARISONS};
+
+// Extract a comparison between a const and non-const
+// Flip yoda conditionals, turning expressions like `42 < x` into `x > 42`
+fn comparison_to_const<'tcx>(
+ cx: &LateContext<'tcx>,
+ typeck: &TypeckResults<'tcx>,
+ expr: &'tcx Expr<'tcx>,
+) -> Option<(CmpOp, &'tcx Expr<'tcx>, &'tcx Expr<'tcx>, Constant<'tcx>, Ty<'tcx>)> {
+ if_chain! {
+ if let ExprKind::Binary(operator, left, right) = expr.kind;
+ if let Ok(cmp_op) = CmpOp::try_from(operator.node);
+ then {
+ match (constant(cx, typeck, left), constant(cx, typeck, right)) {
+ (Some(_), Some(_)) => None,
+ (_, Some(con)) => Some((cmp_op, left, right, con, typeck.expr_ty(right))),
+ (Some(con), _) => Some((cmp_op.reverse(), right, left, con, typeck.expr_ty(left))),
+ _ => None,
+ }
+ } else {
+ None
+ }
+ }
+}
+
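+// Illustrative examples of the two cases handled below (sketch, not exhaustive):
+//   `x > 10 && x < 5`  can never be true (impossible comparison)
+//   `x > 10 && x > 5`  the `x > 5` side has no effect (redundant comparison)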
+pub(super) fn check<'tcx>(
+ cx: &LateContext<'tcx>,
+ and_op: Spanned<BinOpKind>,
+ left_cond: &'tcx Expr<'tcx>,
+ right_cond: &'tcx Expr<'tcx>,
+ span: Span,
+) {
+ if_chain! {
+ // Ensure that the binary operator is &&
+ if and_op.node == BinOpKind::And;
+
+ // Check that both operands to '&&' are themselves a binary operation
+ // The `comparison_to_const` step also checks this, so this step is just an optimization
+ if let ExprKind::Binary(_, _, _) = left_cond.kind;
+ if let ExprKind::Binary(_, _, _) = right_cond.kind;
+
+ let typeck = cx.typeck_results();
+
+ // Check that both operands to '&&' compare a non-literal to a literal
+ if let Some((left_cmp_op, left_expr, left_const_expr, left_const, left_type)) =
+ comparison_to_const(cx, typeck, left_cond);
+ if let Some((right_cmp_op, right_expr, right_const_expr, right_const, right_type)) =
+ comparison_to_const(cx, typeck, right_cond);
+
+ if left_type == right_type;
+
+ // Check that the same expression is compared in both comparisons
+ if SpanlessEq::new(cx).eq_expr(left_expr, right_expr);
+
+ if !left_expr.can_have_side_effects();
+
+ // Compare the two constant expressions
+ if let Some(ordering) = Constant::partial_cmp(cx.tcx(), left_type, &left_const, &right_const);
+
+ // Rule out the `x >= 42 && x <= 42` corner case immediately
+ // Mostly to simplify the implementation, but it is also covered by `clippy::double_comparisons`
+ if !matches!(
+ (&left_cmp_op, &right_cmp_op, ordering),
+ (CmpOp::Le | CmpOp::Ge, CmpOp::Le | CmpOp::Ge, Ordering::Equal)
+ );
+
+ then {
+ if left_cmp_op.direction() == right_cmp_op.direction() {
+ let lhs_str = snippet(cx, left_cond.span, "<lhs>");
+ let rhs_str = snippet(cx, right_cond.span, "<rhs>");
+ // We already know that one side of the `&&` has no effect,
+ // but we emit a different error message depending on which side it is
+ if left_side_is_useless(left_cmp_op, ordering) {
+ span_lint_and_note(
+ cx,
+ REDUNDANT_COMPARISONS,
+ span,
+ "left-hand side of `&&` operator has no effect",
+ Some(left_cond.span.until(right_cond.span)),
+ &format!("`if `{rhs_str}` evaluates to true, {lhs_str}` will always evaluate to true as well"),
+ );
+ } else {
+ span_lint_and_note(
+ cx,
+ REDUNDANT_COMPARISONS,
+ span,
+ "right-hand side of `&&` operator has no effect",
+ Some(and_op.span.to(right_cond.span)),
+ &format!("`if `{lhs_str}` evaluates to true, {rhs_str}` will always evaluate to true as well"),
+ );
+ }
+ // We could autofix this error but choose not to,
+ // because code triggering this lint is probably not behaving correctly in the first place
+ }
+ else if !comparison_is_possible(left_cmp_op.direction(), ordering) {
+ let expr_str = snippet(cx, left_expr.span, "..");
+ let lhs_str = snippet(cx, left_const_expr.span, "<lhs>");
+ let rhs_str = snippet(cx, right_const_expr.span, "<rhs>");
+ let note = match ordering {
+ Ordering::Less => format!("since `{lhs_str}` < `{rhs_str}`, the expression evaluates to false for any value of `{expr_str}`"),
+ Ordering::Equal => format!("`{expr_str}` cannot simultaneously be greater than and less than `{lhs_str}`"),
+ Ordering::Greater => format!("since `{lhs_str}` > `{rhs_str}`, the expression evaluates to false for any value of `{expr_str}`"),
+ };
+ span_lint_and_note(
+ cx,
+ IMPOSSIBLE_COMPARISONS,
+ span,
+ "boolean expression will never evaluate to 'true'",
+ None,
+ &note,
+ );
+ };
+ }
+ }
+}
+
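+// e.g. in `x < 10 && x < 5` the left-hand side has no effect: `x < 5` already implies `x < 10`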
+fn left_side_is_useless(left_cmp_op: CmpOp, ordering: Ordering) -> bool {
+ // Special-case for equal constants with an inclusive comparison
+ if ordering == Ordering::Equal {
+ match left_cmp_op {
+ CmpOp::Lt | CmpOp::Gt => false,
+ CmpOp::Le | CmpOp::Ge => true,
+ }
+ } else {
+ match (left_cmp_op.direction(), ordering) {
+ (CmpOpDirection::Lesser, Ordering::Less) => false,
+ (CmpOpDirection::Lesser, Ordering::Equal) => false,
+ (CmpOpDirection::Lesser, Ordering::Greater) => true,
+ (CmpOpDirection::Greater, Ordering::Less) => true,
+ (CmpOpDirection::Greater, Ordering::Equal) => false,
+ (CmpOpDirection::Greater, Ordering::Greater) => false,
+ }
+ }
+}
+
+fn comparison_is_possible(left_cmp_direction: CmpOpDirection, ordering: Ordering) -> bool {
+ match (left_cmp_direction, ordering) {
+ (CmpOpDirection::Lesser, Ordering::Less | Ordering::Equal) => false,
+ (CmpOpDirection::Lesser, Ordering::Greater) => true,
+ (CmpOpDirection::Greater, Ordering::Greater | Ordering::Equal) => false,
+ (CmpOpDirection::Greater, Ordering::Less) => true,
+ }
+}
+
+#[derive(PartialEq, Eq, Clone, Copy)]
+enum CmpOpDirection {
+ Lesser,
+ Greater,
+}
+
+#[derive(Clone, Copy)]
+enum CmpOp {
+ Lt,
+ Le,
+ Ge,
+ Gt,
+}
+
+impl CmpOp {
+ fn reverse(self) -> Self {
+ match self {
+ CmpOp::Lt => CmpOp::Gt,
+ CmpOp::Le => CmpOp::Ge,
+ CmpOp::Ge => CmpOp::Le,
+ CmpOp::Gt => CmpOp::Lt,
+ }
+ }
+
+ fn direction(self) -> CmpOpDirection {
+ match self {
+ CmpOp::Lt => CmpOpDirection::Lesser,
+ CmpOp::Le => CmpOpDirection::Lesser,
+ CmpOp::Ge => CmpOpDirection::Greater,
+ CmpOp::Gt => CmpOpDirection::Greater,
+ }
+ }
+}
+
+impl TryFrom<BinOpKind> for CmpOp {
+ type Error = ();
+
+ fn try_from(bin_op: BinOpKind) -> Result<Self, Self::Error> {
+ match bin_op {
+ BinOpKind::Lt => Ok(CmpOp::Lt),
+ BinOpKind::Le => Ok(CmpOp::Le),
+ BinOpKind::Ge => Ok(CmpOp::Ge),
+ BinOpKind::Gt => Ok(CmpOp::Gt),
+ _ => Err(()),
+ }
+ }
+}
diff --git a/src/tools/clippy/clippy_lints/src/operators/eq_op.rs b/src/tools/clippy/clippy_lints/src/operators/eq_op.rs
index 78965b7d6..88d566318 100644
--- a/src/tools/clippy/clippy_lints/src/operators/eq_op.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/eq_op.rs
@@ -1,6 +1,7 @@
+use clippy_utils::ast_utils::is_useless_with_eq_exprs;
use clippy_utils::diagnostics::{span_lint, span_lint_and_then};
use clippy_utils::macros::{find_assert_eq_args, first_node_macro_backtrace};
-use clippy_utils::{ast_utils::is_useless_with_eq_exprs, eq_expr_value, is_in_test_function};
+use clippy_utils::{eq_expr_value, is_in_test_function};
use rustc_hir::{BinOpKind, Expr};
use rustc_lint::LateContext;
diff --git a/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs b/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs
index 015f6c14e..5eabb349e 100644
--- a/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs
@@ -1,7 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_then;
-use clippy_utils::eq_expr_value;
use clippy_utils::source::snippet_opt;
-use clippy_utils::sugg;
+use clippy_utils::{eq_expr_value, sugg};
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
diff --git a/src/tools/clippy/clippy_lints/src/operators/mod.rs b/src/tools/clippy/clippy_lints/src/operators/mod.rs
index 2cf15adda..4635e1164 100644
--- a/src/tools/clippy/clippy_lints/src/operators/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/mod.rs
@@ -2,6 +2,7 @@ mod absurd_extreme_comparisons;
mod assign_op_pattern;
mod bit_mask;
mod cmp_owned;
+mod const_comparisons;
mod double_comparison;
mod duration_subsec;
mod eq_op;
@@ -300,6 +301,45 @@ declare_clippy_lint! {
declare_clippy_lint! {
/// ### What it does
+ /// Checks for double comparisons that can never succeed.
+ ///
+ /// ### Why is this bad?
+ /// The whole expression can be replaced by `false`,
+ /// which is probably not the programmer's intention.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let status_code = 200;
+ /// if status_code <= 400 && status_code > 500 {}
+ /// ```
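+ /// Since the two bounds cannot hold at once, the condition above is equivalent
+ /// to the following (illustrative sketch):
+ /// ```rust
+ /// # let status_code = 200;
+ /// if false {}
+ /// ```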
+ #[clippy::version = "1.71.0"]
+ pub IMPOSSIBLE_COMPARISONS,
+ correctness,
+ "double comparisons that will never evaluate to `true`"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for ineffective double comparisons against constants.
+ ///
+ /// ### Why is this bad?
+ /// Only one of the comparisons has any effect on the result; the programmer
+ /// probably intended to flip one of the comparison operators, or compare a
+ /// different value entirely.
+ ///
+ /// ### Example
+ /// ```rust
+ /// # let status_code = 200;
+ /// if status_code <= 400 && status_code < 500 {}
+ /// ```
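+ /// Here `status_code <= 400` already implies `status_code < 500`, so one
+ /// possible simplification (illustrative; the lint itself only warns) is:
+ /// ```rust
+ /// # let status_code = 200;
+ /// if status_code <= 400 {}
+ /// ```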
+ #[clippy::version = "1.71.0"]
+ pub REDUNDANT_COMPARISONS,
+ correctness,
+ "double comparisons where one of them can be removed"
+}
+
+declare_clippy_lint! {
+ /// ### What it does
/// Checks for calculation of subsecond microseconds or milliseconds
/// from other `Duration` methods.
///
@@ -742,6 +782,8 @@ impl_lint_pass!(Operators => [
INEFFECTIVE_BIT_MASK,
VERBOSE_BIT_MASK,
DOUBLE_COMPARISONS,
+ IMPOSSIBLE_COMPARISONS,
+ REDUNDANT_COMPARISONS,
DURATION_SUBSEC,
EQ_OP,
OP_REF,
@@ -786,6 +828,7 @@ impl<'tcx> LateLintPass<'tcx> for Operators {
bit_mask::check(cx, e, op.node, lhs, rhs);
verbose_bit_mask::check(cx, e, op.node, lhs, rhs, self.verbose_bit_mask_threshold);
double_comparison::check(cx, op.node, lhs, rhs, e.span);
+ const_comparisons::check(cx, op, lhs, rhs, e.span);
duration_subsec::check(cx, e, op.node, lhs, rhs);
float_equality_without_abs::check(cx, e, op.node, lhs, rhs);
integer_division::check(cx, e, op.node, lhs, rhs);
diff --git a/src/tools/clippy/clippy_lints/src/operators/op_ref.rs b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs
index d7917e86a..932dd470f 100644
--- a/src/tools/clippy/clippy_lints/src/operators/op_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs
@@ -4,7 +4,9 @@ use clippy_utils::source::snippet;
use clippy_utils::ty::{implements_trait, is_copy};
use if_chain::if_chain;
use rustc_errors::Applicability;
-use rustc_hir::{def::Res, def_id::DefId, BinOpKind, BorrowKind, Expr, ExprKind, GenericArg, ItemKind, QPath, TyKind};
+use rustc_hir::def::Res;
+use rustc_hir::def_id::DefId;
+use rustc_hir::{BinOpKind, BorrowKind, Expr, ExprKind, GenericArg, ItemKind, QPath, TyKind};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};
diff --git a/src/tools/clippy/clippy_lints/src/option_env_unwrap.rs b/src/tools/clippy/clippy_lints/src/option_env_unwrap.rs
index 377bddeaa..9c7f7e1cd 100644
--- a/src/tools/clippy/clippy_lints/src/option_env_unwrap.rs
+++ b/src/tools/clippy/clippy_lints/src/option_env_unwrap.rs
@@ -1,10 +1,9 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::is_direct_expn_of;
-use if_chain::if_chain;
use rustc_ast::ast::{Expr, ExprKind, MethodCall};
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::sym;
+use rustc_span::{sym, Span};
declare_clippy_lint! {
/// ### What it does
@@ -36,21 +35,27 @@ declare_lint_pass!(OptionEnvUnwrap => [OPTION_ENV_UNWRAP]);
impl EarlyLintPass for OptionEnvUnwrap {
fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
- if_chain! {
- if let ExprKind::MethodCall(box MethodCall { seg, receiver, .. }) = &expr.kind;
- if matches!(seg.ident.name, sym::expect | sym::unwrap);
- if let ExprKind::Call(caller, _) = &receiver.kind;
- if is_direct_expn_of(caller.span, "option_env").is_some();
- then {
- span_lint_and_help(
- cx,
- OPTION_ENV_UNWRAP,
- expr.span,
- "this will panic at run-time if the environment variable doesn't exist at compile-time",
- None,
- "consider using the `env!` macro instead"
- );
- }
+ fn lint(cx: &EarlyContext<'_>, span: Span) {
+ span_lint_and_help(
+ cx,
+ OPTION_ENV_UNWRAP,
+ span,
+ "this will panic at run-time if the environment variable doesn't exist at compile-time",
+ None,
+ "consider using the `env!` macro instead",
+ );
}
+
+ if let ExprKind::MethodCall(box MethodCall { seg, receiver, .. }) = &expr.kind &&
+ matches!(seg.ident.name, sym::expect | sym::unwrap) {
+ if let ExprKind::Call(caller, _) = &receiver.kind &&
+ // If it exists, it will be ::core::option::Option::Some("<env var>").unwrap() (A method call in the HIR)
+ is_direct_expn_of(caller.span, "option_env").is_some() {
+ lint(cx, expr.span);
+ } else if let ExprKind::Path(_, caller) = &receiver.kind && // If it doesn't exist, it will be ::core::option::Option::None::<&'static str>.unwrap() (A path in the HIR)
+ is_direct_expn_of(caller.span, "option_env").is_some() {
+ lint(cx, expr.span);
+ }
+ }
}
}
diff --git a/src/tools/clippy/clippy_lints/src/option_if_let_else.rs b/src/tools/clippy/clippy_lints/src/option_if_let_else.rs
index abdccc47f..a7a7f4fd8 100644
--- a/src/tools/clippy/clippy_lints/src/option_if_let_else.rs
+++ b/src/tools/clippy/clippy_lints/src/option_if_let_else.rs
@@ -6,10 +6,9 @@ use clippy_utils::{
};
use if_chain::if_chain;
use rustc_errors::Applicability;
+use rustc_hir::def::Res;
use rustc_hir::LangItem::{OptionNone, OptionSome, ResultErr, ResultOk};
-use rustc_hir::{
- def::Res, Arm, BindingAnnotation, Expr, ExprKind, MatchSource, Mutability, Pat, PatKind, Path, QPath, UnOp,
-};
+use rustc_hir::{Arm, BindingAnnotation, Expr, ExprKind, MatchSource, Mutability, Pat, PatKind, Path, QPath, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::SyntaxContext;
@@ -156,7 +155,7 @@ fn try_get_option_occurrence<'tcx>(
});
if let ExprKind::Path(QPath::Resolved(None, Path { res: Res::Local(local_id), .. })) = e.kind {
match some_captures.get(local_id)
- .or_else(|| (method_sugg == "map_or_else").then_some(()).and_then(|_| none_captures.get(local_id)))
+ .or_else(|| (method_sugg == "map_or_else").then_some(()).and_then(|()| none_captures.get(local_id)))
{
Some(CaptureKind::Value | CaptureKind::Ref(Mutability::Mut)) => return None,
Some(CaptureKind::Ref(Mutability::Not)) if as_mut => return None,
diff --git a/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs b/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs
index 849cd03dd..a049427d8 100644
--- a/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs
+++ b/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs
@@ -13,7 +13,7 @@ use rustc_span::{sym, Span};
declare_clippy_lint! {
/// ### What it does
- /// Checks for usage of `panic!`, `unimplemented!`, `todo!`, `unreachable!` or assertions in a function of type result.
+ /// Checks for usage of `panic!` or assertions in a function of type result.
///
/// ### Why is this bad?
/// For some codebases, it is desirable for functions of type result to return an error instead of crashing. Hence panicking macros should be avoided.
@@ -37,7 +37,7 @@ declare_clippy_lint! {
#[clippy::version = "1.48.0"]
pub PANIC_IN_RESULT_FN,
restriction,
- "functions of type `Result<..>` that contain `panic!()`, `todo!()`, `unreachable()`, `unimplemented()` or assertion"
+ "functions of type `Result<..>` that contain `panic!()` or assertion"
}
declare_lint_pass!(PanicInResultFn => [PANIC_IN_RESULT_FN]);
@@ -70,7 +70,7 @@ fn lint_impl_body<'tcx>(cx: &LateContext<'tcx>, impl_span: Span, body: &'tcx hir
};
if matches!(
cx.tcx.item_name(macro_call.def_id).as_str(),
- "unimplemented" | "unreachable" | "panic" | "todo" | "assert" | "assert_eq" | "assert_ne"
+ "panic" | "assert" | "assert_eq" | "assert_ne"
) {
panics.push(macro_call.span);
ControlFlow::Continue(Descend::No)
@@ -83,10 +83,10 @@ fn lint_impl_body<'tcx>(cx: &LateContext<'tcx>, impl_span: Span, body: &'tcx hir
cx,
PANIC_IN_RESULT_FN,
impl_span,
- "used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`",
+ "used `panic!()` or assertion in a function that returns `Result`",
move |diag| {
diag.help(
- "`unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing",
+ "`panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing",
);
diag.span_note(panics, "return Err() instead of panicking");
},
diff --git a/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs b/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs
index 2f3007658..a72aefe91 100644
--- a/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs
+++ b/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs
@@ -76,7 +76,9 @@ declare_lint_pass!(PanicUnimplemented => [UNIMPLEMENTED, UNREACHABLE, TODO, PANI
impl<'tcx> LateLintPass<'tcx> for PanicUnimplemented {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return };
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else {
+ return;
+ };
if is_panic(cx, macro_call.def_id) {
if cx.tcx.hir().is_inside_const_context(expr.hir_id) {
return;
diff --git a/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs b/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs
index 456ded3fc..d9f5d1642 100644
--- a/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs
+++ b/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs
@@ -1,7 +1,6 @@
-use clippy_utils::{
- diagnostics::span_lint_and_sugg, is_res_lang_ctor, path_res, peel_hir_expr_refs, peel_ref_operators, sugg,
- ty::is_type_diagnostic_item,
-};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::{is_res_lang_ctor, path_res, peel_hir_expr_refs, peel_ref_operators, sugg};
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind, LangItem};
use rustc_lint::{LateContext, LateLintPass};
diff --git a/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs b/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs
index eab725de1..41513647f 100644
--- a/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs
+++ b/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs
@@ -1,5 +1,4 @@
-use std::cmp;
-use std::iter;
+use std::{cmp, iter};
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
@@ -143,7 +142,7 @@ impl<'tcx> PassByRefOrValue {
return;
}
- let fn_sig = cx.tcx.fn_sig(def_id).subst_identity();
+ let fn_sig = cx.tcx.fn_sig(def_id).instantiate_identity();
let fn_body = cx.enclosing_body.map(|id| cx.tcx.hir().body(id));
// Gather all the lifetimes found in the output type which may affect whether
diff --git a/src/tools/clippy/clippy_lints/src/ptr.rs b/src/tools/clippy/clippy_lints/src/ptr.rs
index 32213718b..8009b00b4 100644
--- a/src/tools/clippy/clippy_lints/src/ptr.rs
+++ b/src/tools/clippy/clippy_lints/src/ptr.rs
@@ -26,10 +26,10 @@ use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
use rustc_span::sym;
use rustc_span::symbol::Symbol;
+use rustc_target::spec::abi::Abi;
use rustc_trait_selection::infer::InferCtxtExt as _;
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _;
-use std::fmt;
-use std::iter;
+use std::{fmt, iter};
declare_clippy_lint! {
/// ### What it does
@@ -164,9 +164,19 @@ impl<'tcx> LateLintPass<'tcx> for Ptr {
}
check_mut_from_ref(cx, sig, None);
+
+ if !matches!(sig.header.abi, Abi::Rust) {
+ // Ignore `extern` functions with non-Rust calling conventions
+ return;
+ }
+
for arg in check_fn_args(
cx,
- cx.tcx.fn_sig(item.owner_id).subst_identity().skip_binder().inputs(),
+ cx.tcx
+ .fn_sig(item.owner_id)
+ .instantiate_identity()
+ .skip_binder()
+ .inputs(),
sig.decl.inputs,
&sig.decl.output,
&[],
@@ -219,8 +229,14 @@ impl<'tcx> LateLintPass<'tcx> for Ptr {
};
check_mut_from_ref(cx, sig, Some(body));
+
+ if !matches!(sig.header.abi, Abi::Rust) {
+ // Ignore `extern` functions with non-Rust calling conventions
+ return;
+ }
+
let decl = sig.decl;
- let sig = cx.tcx.fn_sig(item_id).subst_identity().skip_binder();
+ let sig = cx.tcx.fn_sig(item_id).instantiate_identity().skip_binder();
let lint_args: Vec<_> = check_fn_args(cx, sig.inputs(), decl.inputs, &decl.output, body.params)
.filter(|arg| !is_trait_item || arg.mutability() == Mutability::Not)
.collect();
@@ -389,11 +405,12 @@ impl<'tcx> DerefTy<'tcx> {
fn ty(&self, cx: &LateContext<'tcx>) -> Ty<'tcx> {
match *self {
Self::Str => cx.tcx.types.str_,
- Self::Path => Ty::new_adt(cx.tcx,
+ Self::Path => Ty::new_adt(
+ cx.tcx,
cx.tcx.adt_def(cx.tcx.get_diagnostic_item(sym::Path).unwrap()),
List::empty(),
),
- Self::Slice(_, ty) => Ty::new_slice(cx.tcx,ty),
+ Self::Slice(_, ty) => Ty::new_slice(cx.tcx, ty),
}
}
@@ -423,7 +440,7 @@ fn check_fn_args<'cx, 'tcx: 'cx>(
.enumerate()
.filter_map(move |(i, (ty, hir_ty))| {
if let ty::Ref(_, ty, mutability) = *ty.kind()
- && let ty::Adt(adt, substs) = *ty.kind()
+ && let ty::Adt(adt, args) = *ty.kind()
&& let TyKind::Ref(lt, ref ty) = hir_ty.kind
&& let TyKind::Path(QPath::Resolved(None, path)) = ty.ty.kind
// Check that the name as typed matches the actual name of the type.
@@ -443,7 +460,7 @@ fn check_fn_args<'cx, 'tcx: 'cx>(
} else {
None
}),
- substs.type_at(0),
+ args.type_at(0),
),
),
_ if Some(adt.did()) == cx.tcx.lang_items().string() => (
@@ -496,7 +513,7 @@ fn check_fn_args<'cx, 'tcx: 'cx>(
}
let ty_name =
- snippet_opt(cx, ty.span()).unwrap_or_else(|| substs.type_at(1).to_string());
+ snippet_opt(cx, ty.span()).unwrap_or_else(|| args.type_at(1).to_string());
span_lint_hir_and_then(
cx,
@@ -659,7 +676,7 @@ fn check_ptr_arg_usage<'tcx>(cx: &LateContext<'tcx>, body: &'tcx Body<'_>, args:
return;
};
- match *self.cx.tcx.fn_sig(id).subst_identity().skip_binder().inputs()[i]
+ match *self.cx.tcx.fn_sig(id).instantiate_identity().skip_binder().inputs()[i]
.peel_refs()
.kind()
{
@@ -678,7 +695,7 @@ fn check_ptr_arg_usage<'tcx>(cx: &LateContext<'tcx>, body: &'tcx Body<'_>, args:
}
},
// Indexing is fine for currently supported types.
- ExprKind::Index(e, _) if e.hir_id == child_id => (),
+ ExprKind::Index(e, _, _) if e.hir_id == child_id => (),
_ => set_skip_flag(),
},
_ => set_skip_flag(),
@@ -725,7 +742,7 @@ fn matches_preds<'tcx>(
let infcx = cx.tcx.infer_ctxt().build();
preds.iter().all(|&p| match cx.tcx.erase_late_bound_regions(p) {
ExistentialPredicate::Trait(p) => infcx
- .type_implements_trait(p.def_id, [ty.into()].into_iter().chain(p.substs.iter()), cx.param_env)
+ .type_implements_trait(p.def_id, [ty.into()].into_iter().chain(p.args.iter()), cx.param_env)
.must_apply_modulo_regions(),
ExistentialPredicate::Projection(p) => infcx.predicate_must_hold_modulo_regions(&Obligation::new(
cx.tcx,
diff --git a/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs b/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs
index 47b8891e1..20e032d4b 100644
--- a/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs
+++ b/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs
@@ -50,12 +50,12 @@ impl<'tcx> LateLintPass<'tcx> for PtrOffsetWithCast {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
 // Check if the expression is a ptr.offset or ptr.wrapping_offset method call
let Some((receiver_expr, arg_expr, method)) = expr_as_ptr_offset_call(cx, expr) else {
- return
+ return;
};
// Check if the argument to the method call is a cast from usize
let Some(cast_lhs_expr) = expr_as_cast_from_usize(cx, arg_expr) else {
- return
+ return;
};
let msg = format!("use of `{method}` with a `usize` casted to an `isize`");
diff --git a/src/tools/clippy/clippy_lints/src/question_mark.rs b/src/tools/clippy/clippy_lints/src/question_mark.rs
index e3d940ad2..734ca2914 100644
--- a/src/tools/clippy/clippy_lints/src/question_mark.rs
+++ b/src/tools/clippy/clippy_lints/src/question_mark.rs
@@ -1,21 +1,26 @@
+use crate::manual_let_else::{MatchLintBehaviour, MANUAL_LET_ELSE};
+use crate::question_mark_used::QUESTION_MARK_USED;
use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::msrvs::Msrv;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{
- eq_expr_value, get_parent_node, in_constant, is_else_clause, is_res_lang_ctor, path_to_local, path_to_local_id,
- peel_blocks, peel_blocks_with_stmt,
+ eq_expr_value, get_parent_node, higher, in_constant, is_else_clause, is_lint_allowed, is_path_lang_item,
+ is_res_lang_ctor, pat_and_expr_can_be_question_mark, path_to_local, path_to_local_id, peel_blocks,
+ peel_blocks_with_stmt,
};
-use clippy_utils::{higher, is_path_lang_item};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
use rustc_hir::LangItem::{self, OptionNone, OptionSome, ResultErr, ResultOk};
-use rustc_hir::{BindingAnnotation, ByRef, Expr, ExprKind, Node, PatKind, PathSegment, QPath};
+use rustc_hir::{
+ BindingAnnotation, Block, ByRef, Expr, ExprKind, Local, Node, PatKind, PathSegment, QPath, Stmt, StmtKind,
+};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::Ty;
-use rustc_session::declare_tool_lint;
-use rustc_session::impl_lint_pass;
-use rustc_span::{sym, symbol::Symbol};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::sym;
+use rustc_span::symbol::Symbol;
declare_clippy_lint! {
/// ### What it does
@@ -42,8 +47,9 @@ declare_clippy_lint! {
"checks for expressions that could be replaced by the question mark operator"
}
-#[derive(Default)]
pub struct QuestionMark {
+ pub(crate) msrv: Msrv,
+ pub(crate) matches_behaviour: MatchLintBehaviour,
/// Keeps track of how many try blocks we are in at any point during linting.
/// This allows us to answer the question "are we inside of a try block"
/// very quickly, without having to walk up the parent chain, by simply checking
@@ -51,7 +57,19 @@ pub struct QuestionMark {
/// As for why we need this in the first place: <https://github.com/rust-lang/rust-clippy/issues/8628>
try_block_depth_stack: Vec<u32>,
}
-impl_lint_pass!(QuestionMark => [QUESTION_MARK]);
+
+impl_lint_pass!(QuestionMark => [QUESTION_MARK, MANUAL_LET_ELSE]);
+
+impl QuestionMark {
+ #[must_use]
+ pub fn new(msrv: Msrv, matches_behaviour: MatchLintBehaviour) -> Self {
+ Self {
+ msrv,
+ matches_behaviour,
+ try_block_depth_stack: Vec::new(),
+ }
+ }
+}
enum IfBlockType<'hir> {
/// An `if x.is_xxx() { a } else { b } ` expression.
@@ -78,6 +96,29 @@ enum IfBlockType<'hir> {
),
}
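+/// Illustrative sketch of the rewrite this check suggests (hypothetical example,
+/// assuming the surrounding function returns `Option<_>`):
+///
+/// ```rust,ignore
+/// // before
+/// let Some(v) = maybe_value() else { return None };
+/// // after
+/// let v = maybe_value()?;
+/// ```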
+fn check_let_some_else_return_none(cx: &LateContext<'_>, stmt: &Stmt<'_>) {
+ if let StmtKind::Local(Local { pat, init: Some(init_expr), els: Some(els), .. }) = stmt.kind &&
+ let Block { stmts: &[], expr: Some(els), .. } = els &&
+ let Some(inner_pat) = pat_and_expr_can_be_question_mark(cx, pat, els)
+ {
+ let mut applicability = Applicability::MaybeIncorrect;
+ let init_expr_str = snippet_with_applicability(cx, init_expr.span, "..", &mut applicability);
+ let receiver_str = snippet_with_applicability(cx, inner_pat.span, "..", &mut applicability);
+ let sugg = format!(
+ "let {receiver_str} = {init_expr_str}?;",
+ );
+ span_lint_and_sugg(
+ cx,
+ QUESTION_MARK,
+ stmt.span,
+ "this `let...else` may be rewritten with the `?` operator",
+ "replace it with",
+ sugg,
+ applicability,
+ );
+ }
+}
+
fn is_early_return(smbl: Symbol, cx: &LateContext<'_>, if_block: &IfBlockType<'_>) -> bool {
match *if_block {
IfBlockType::IfIs(caller, caller_ty, call_sym, if_then, _) => {
@@ -259,8 +300,18 @@ fn is_try_block(cx: &LateContext<'_>, bl: &rustc_hir::Block<'_>) -> bool {
}
impl<'tcx> LateLintPass<'tcx> for QuestionMark {
+ fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
+ if !is_lint_allowed(cx, QUESTION_MARK_USED, stmt.hir_id) {
+ return;
+ }
+
+ if !in_constant(cx, stmt.hir_id) {
+ check_let_some_else_return_none(cx, stmt);
+ }
+ self.check_manual_let_else(cx, stmt);
+ }
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- if !in_constant(cx, expr.hir_id) {
+ if !in_constant(cx, expr.hir_id) && is_lint_allowed(cx, QUESTION_MARK_USED, expr.hir_id) {
self.check_is_none_or_err_and_early_return(cx, expr);
self.check_if_let_some_or_err_and_early_return(cx, expr);
}
@@ -291,4 +342,5 @@ impl<'tcx> LateLintPass<'tcx> for QuestionMark {
.expect("blocks are always part of bodies and must have a depth") -= 1;
}
}
+ extract_msrv_attr!(LateContext);
}
diff --git a/src/tools/clippy/clippy_lints/src/question_mark_used.rs b/src/tools/clippy/clippy_lints/src/question_mark_used.rs
index ff66b8a00..d0de33e3c 100644
--- a/src/tools/clippy/clippy_lints/src/question_mark_used.rs
+++ b/src/tools/clippy/clippy_lints/src/question_mark_used.rs
@@ -34,7 +34,7 @@ declare_lint_pass!(QuestionMarkUsed => [QUESTION_MARK_USED]);
impl<'tcx> LateLintPass<'tcx> for QuestionMarkUsed {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- if let ExprKind::Match(_, _, MatchSource::TryDesugar) = expr.kind {
+ if let ExprKind::Match(_, _, MatchSource::TryDesugar(_)) = expr.kind {
if !span_is_local(expr.span) {
return;
}
diff --git a/src/tools/clippy/clippy_lints/src/ranges.rs b/src/tools/clippy/clippy_lints/src/ranges.rs
index d2018aba9..3287675a8 100644
--- a/src/tools/clippy/clippy_lints/src/ranges.rs
+++ b/src/tools/clippy/clippy_lints/src/ranges.rs
@@ -1,10 +1,9 @@
use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_then};
-use clippy_utils::higher;
use clippy_utils::msrvs::{self, Msrv};
use clippy_utils::source::{snippet, snippet_opt, snippet_with_applicability};
use clippy_utils::sugg::Sugg;
-use clippy_utils::{get_parent_expr, in_constant, is_integer_const, path_to_local};
+use clippy_utils::{get_parent_expr, higher, in_constant, is_integer_const, path_to_local};
use if_chain::if_chain;
use rustc_ast::ast::RangeLimits;
use rustc_errors::Applicability;
diff --git a/src/tools/clippy/clippy_lints/src/raw_strings.rs b/src/tools/clippy/clippy_lints/src/raw_strings.rs
index f45bb1ef3..ccabb577c 100644
--- a/src/tools/clippy/clippy_lints/src/raw_strings.rs
+++ b/src/tools/clippy/clippy_lints/src/raw_strings.rs
@@ -1,10 +1,10 @@
-use std::{iter::once, ops::ControlFlow};
+use std::iter::once;
+use std::ops::ControlFlow;
-use clippy_utils::{diagnostics::span_lint_and_sugg, source::snippet};
-use rustc_ast::{
- ast::{Expr, ExprKind},
- token::LitKind,
-};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use rustc_ast::ast::{Expr, ExprKind};
+use rustc_ast::token::LitKind;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
@@ -95,7 +95,7 @@ impl EarlyLintPass for RawStrings {
// `once` so a raw string ending in hashes is still checked
let num = str.as_bytes().iter().chain(once(&0)).try_fold(0u8, |acc, &b| {
match b {
- b'"' => (following_quote, req) = (true, 1),
+ b'"' if !following_quote => (following_quote, req) = (true, 1),
// I'm a bit surprised the compiler didn't optimize this out, there's no
// branch but it still ends up doing an unnecessary comparison, it's:
// - cmp r9b,1h
diff --git a/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs b/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs
index e82aa3a7b..8e85c55e7 100644
--- a/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs
+++ b/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs
@@ -1,10 +1,9 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::higher::VecArgs;
-use clippy_utils::last_path_segment;
use clippy_utils::macros::root_macro_call_first_node;
-use clippy_utils::paths;
use clippy_utils::source::{indent_of, snippet};
use clippy_utils::ty::match_type;
+use clippy_utils::{last_path_segment, paths};
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, QPath, TyKind};
use rustc_lint::{LateContext, LateLintPass};
@@ -50,9 +49,15 @@ declare_lint_pass!(RcCloneInVecInit => [RC_CLONE_IN_VEC_INIT]);
impl LateLintPass<'_> for RcCloneInVecInit {
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
- let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return; };
- let Some(VecArgs::Repeat(elem, len)) = VecArgs::hir(cx, expr) else { return; };
- let Some((symbol, func_span)) = ref_init(cx, elem) else { return; };
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else {
+ return;
+ };
+ let Some(VecArgs::Repeat(elem, len)) = VecArgs::hir(cx, expr) else {
+ return;
+ };
+ let Some((symbol, func_span)) = ref_init(cx, elem) else {
+ return;
+ };
emit_lint(cx, symbol, macro_call.span, elem, len, func_span);
}
diff --git a/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs b/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs
index fa1078588..2bf90815c 100644
--- a/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs
+++ b/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs
@@ -1,9 +1,7 @@
-use clippy_utils::{
- diagnostics::{span_lint, span_lint_and_sugg},
- higher::{get_vec_init_kind, VecInitKind},
- source::snippet,
- visitors::for_each_expr,
-};
+use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg};
+use clippy_utils::higher::{get_vec_init_kind, VecInitKind};
+use clippy_utils::source::snippet;
+use clippy_utils::visitors::for_each_expr;
use core::ops::ControlFlow;
use hir::{Expr, ExprKind, Local, PatKind, PathSegment, QPath, StmtKind};
use rustc_errors::Applicability;
diff --git a/src/tools/clippy/clippy_lints/src/redundant_async_block.rs b/src/tools/clippy/clippy_lints/src/redundant_async_block.rs
index 05e52e6b3..534b2762b 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_async_block.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_async_block.rs
@@ -1,15 +1,14 @@
use std::ops::ControlFlow;
-use clippy_utils::{
- diagnostics::span_lint_and_sugg,
- peel_blocks,
- source::{snippet, walk_span_to_context},
- visitors::for_each_expr,
-};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::peel_blocks;
+use clippy_utils::source::{snippet, walk_span_to_context};
+use clippy_utils::visitors::for_each_expr;
use rustc_errors::Applicability;
use rustc_hir::{AsyncGeneratorKind, Closure, Expr, ExprKind, GeneratorKind, MatchSource};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_middle::{lint::in_external_macro, ty::UpvarCapture};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::UpvarCapture;
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
diff --git a/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs b/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs
index b6ce4ebc2..fc49b58e0 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs
@@ -6,8 +6,7 @@ use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::intravisit as hir_visit;
-use rustc_hir::intravisit::Visitor as HirVisitor;
-use rustc_hir::intravisit::Visitor;
+use rustc_hir::intravisit::{Visitor as HirVisitor, Visitor};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter;
use rustc_middle::lint::in_external_macro;
@@ -53,7 +52,7 @@ impl ReturnVisitor {
impl<'tcx> Visitor<'tcx> for ReturnVisitor {
fn visit_expr(&mut self, ex: &'tcx hir::Expr<'tcx>) {
- if let hir::ExprKind::Ret(_) | hir::ExprKind::Match(.., hir::MatchSource::TryDesugar) = ex.kind {
+ if let hir::ExprKind::Ret(_) | hir::ExprKind::Match(.., hir::MatchSource::TryDesugar(_)) = ex.kind {
self.found_return = true;
} else {
hir_visit::walk_expr(self, ex);
diff --git a/src/tools/clippy/clippy_lints/src/redundant_locals.rs b/src/tools/clippy/clippy_lints/src/redundant_locals.rs
new file mode 100644
index 000000000..0c89c7ee4
--- /dev/null
+++ b/src/tools/clippy/clippy_lints/src/redundant_locals.rs
@@ -0,0 +1,126 @@
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::is_from_proc_macro;
+use clippy_utils::ty::needs_ordered_drop;
+use rustc_ast::Mutability;
+use rustc_hir::def::Res;
+use rustc_hir::{
+ BindingAnnotation, ByRef, Expr, ExprKind, HirId, Local, Node, Pat, PatKind, QPath,
+};
+use rustc_lint::{LateContext, LateLintPass, LintContext};
+use rustc_middle::lint::{in_external_macro, is_from_async_await};
+use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_span::symbol::Ident;
+use rustc_span::DesugaringKind;
+
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for redundant redefinitions of local bindings.
+ ///
+ /// ### Why is this bad?
+ /// Redundant redefinitions of local bindings do not change behavior and are likely to be unintended.
+ ///
+ /// Note that although these bindings do not affect your code's meaning, they _may_ affect `rustc`'s stack allocation.
+ ///
+ /// ### Example
+ /// ```rust
+ /// let a = 0;
+ /// let a = a;
+ ///
+ /// fn foo(b: i32) {
+ /// let b = b;
+ /// }
+ /// ```
+ /// Use instead:
+ /// ```rust
+ /// let a = 0;
+ /// // no redefinition with the same name
+ ///
+ /// fn foo(b: i32) {
+ /// // no redefinition with the same name
+ /// }
+ /// ```
+ #[clippy::version = "1.72.0"]
+ pub REDUNDANT_LOCALS,
+ correctness,
+ "redundant redefinition of a local binding"
+}
+declare_lint_pass!(RedundantLocals => [REDUNDANT_LOCALS]);
+
+impl<'tcx> LateLintPass<'tcx> for RedundantLocals {
+ fn check_local(&mut self, cx: &LateContext<'tcx>, local: &'tcx Local<'tcx>) {
+ if_chain! {
+ if !local.span.is_desugaring(DesugaringKind::Async);
+ // the pattern is a single by-value binding
+ if let PatKind::Binding(BindingAnnotation(ByRef::No, mutability), _, ident, None) = local.pat.kind;
+ // the binding is not type-ascribed
+ if local.ty.is_none();
+ // the expression is a resolved path
+ if let Some(expr) = local.init;
+ if let ExprKind::Path(qpath @ QPath::Resolved(None, path)) = expr.kind;
+ // the path is a single segment equal to the local's name
+ if let [last_segment] = path.segments;
+ if last_segment.ident == ident;
+ // resolve the path to its defining binding pattern
+ if let Res::Local(binding_id) = cx.qpath_res(&qpath, expr.hir_id);
+ if let Node::Pat(binding_pat) = cx.tcx.hir().get(binding_id);
+ // the previous binding has the same mutability
+ if find_binding(binding_pat, ident).unwrap().1 == mutability;
+ // the local does not change the effect of assignments to the binding. see #11290
+ if !affects_assignments(cx, mutability, binding_id, local.hir_id);
+ // the local does not affect the code's drop behavior
+ if !affects_drop_behavior(cx, binding_id, local.hir_id, expr);
+ // the local is user-controlled
+ if !in_external_macro(cx.sess(), local.span);
+ if !is_from_proc_macro(cx, expr);
+ // Async function parameters are lowered into the closure body, so we can't lint them.
+ // see `lower_maybe_async_body` in `rustc_ast_lowering`
+ if !is_from_async_await(local.span);
+ then {
+ span_lint_and_help(
+ cx,
+ REDUNDANT_LOCALS,
+ vec![binding_pat.span, local.span],
+ "redundant redefinition of a binding",
+ None,
+ &format!("remove the redefinition of `{ident}`"),
+ );
+ }
+ }
+ }
+}
+
+/// Find the annotation of a binding introduced by a pattern, or `None` if it's not introduced.
+fn find_binding(pat: &Pat<'_>, name: Ident) -> Option<BindingAnnotation> {
+ let mut ret = None;
+
+ pat.each_binding_or_first(&mut |annotation, _, _, ident| {
+ if ident == name {
+ ret = Some(annotation);
+ }
+ });
+
+ ret
+}
+
+/// Check if a rebinding of a local changes the effect of assignments to the binding.
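+///
+/// Illustrative sketch of the case this rules out (hypothetical example):
+/// ```rust,ignore
+/// let mut x = 0;
+/// {
+///     let mut x = x; // rebinding in a different scope
+///     x = 1;         // assigns to the inner `x`; the outer `x` stays 0
+/// }
+/// ```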
+fn affects_assignments(cx: &LateContext<'_>, mutability: Mutability, bind: HirId, rebind: HirId) -> bool {
+ let hir = cx.tcx.hir();
+
+ // the binding is mutable and the rebinding is in a different scope than the original binding
+ mutability == Mutability::Mut && hir.get_enclosing_scope(bind) != hir.get_enclosing_scope(rebind)
+}
+
+/// Check if a rebinding of a local affects the code's drop behavior.
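+///
+/// Illustrative sketch (hypothetical example, `mutex` is assumed to be a `std::sync::Mutex`):
+/// rebinding a value whose type has a significant `Drop` inside a nested block moves the
+/// point at which it is dropped, so the rebinding is not redundant.
+/// ```rust,ignore
+/// let guard = mutex.lock().unwrap();
+/// {
+///     let guard = guard; // now dropped at the end of this inner block
+/// }
+/// ```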
+fn affects_drop_behavior<'tcx>(
+ cx: &LateContext<'tcx>,
+ bind: HirId,
+ rebind: HirId,
+ rebind_expr: &Expr<'tcx>,
+) -> bool {
+ let hir = cx.tcx.hir();
+
+ // the rebinding is in a different scope than the original binding
+ // and the type of the binding cares about drop order
+ hir.get_enclosing_scope(bind) != hir.get_enclosing_scope(rebind)
+ && needs_ordered_drop(cx, cx.typeck_results().expr_ty(rebind_expr))
+}
diff --git a/src/tools/clippy/clippy_lints/src/redundant_slicing.rs b/src/tools/clippy/clippy_lints/src/redundant_slicing.rs
index c70ce83a9..4abfa0fc3 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_slicing.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_slicing.rs
@@ -7,9 +7,8 @@ use rustc_ast::util::parser::PREC_PREFIX;
use rustc_errors::Applicability;
use rustc_hir::{BorrowKind, Expr, ExprKind, LangItem, Mutability};
use rustc_lint::{LateContext, LateLintPass, Lint};
-use rustc_middle::ty::Ty;
use rustc_middle::ty::adjustment::{Adjust, AutoBorrow, AutoBorrowMutability};
-use rustc_middle::ty::subst::GenericArg;
+use rustc_middle::ty::{GenericArg, Ty};
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
@@ -82,7 +81,7 @@ impl<'tcx> LateLintPass<'tcx> for RedundantSlicing {
if_chain! {
if let ExprKind::AddrOf(BorrowKind::Ref, mutability, addressee) = expr.kind;
if addressee.span.ctxt() == ctxt;
- if let ExprKind::Index(indexed, range) = addressee.kind;
+ if let ExprKind::Index(indexed, range, _) = addressee.kind;
if is_type_lang_item(cx, cx.typeck_results().expr_ty_adjusted(range), LangItem::RangeFull);
then {
let (expr_ty, expr_ref_count) = peel_mid_ty_refs(cx.typeck_results().expr_ty(expr));
@@ -135,7 +134,7 @@ impl<'tcx> LateLintPass<'tcx> for RedundantSlicing {
} else if let Some(target_id) = cx.tcx.lang_items().deref_target() {
if let Ok(deref_ty) = cx.tcx.try_normalize_erasing_regions(
cx.param_env,
- Ty::new_projection(cx.tcx,target_id, cx.tcx.mk_substs(&[GenericArg::from(indexed_ty)])),
+ Ty::new_projection(cx.tcx, target_id, cx.tcx.mk_args(&[GenericArg::from(indexed_ty)])),
) {
if deref_ty == expr_ty {
let snip = snippet_with_context(cx, indexed.span, ctxt, "..", &mut app).0;
diff --git a/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs b/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs
index 038dfe8e4..ed42a422b 100644
--- a/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs
+++ b/src/tools/clippy/clippy_lints/src/redundant_static_lifetimes.rs
@@ -5,6 +5,7 @@ use rustc_ast::ast::{ConstItem, Item, ItemKind, StaticItem, Ty, TyKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::symbol::kw;
declare_clippy_lint! {
/// ### What it does
@@ -64,7 +65,7 @@ impl RedundantStaticLifetimes {
if let Some(lifetime) = *optional_lifetime {
match borrow_type.ty.kind {
TyKind::Path(..) | TyKind::Slice(..) | TyKind::Array(..) | TyKind::Tup(..) => {
- if lifetime.ident.name == rustc_span::symbol::kw::StaticLifetime {
+ if lifetime.ident.name == kw::StaticLifetime {
let snip = snippet(cx, borrow_type.ty.span, "<type>");
let sugg = format!("&{}{snip}", borrow_type.mutbl.prefix_str());
span_lint_and_then(
diff --git a/src/tools/clippy/clippy_lints/src/reference.rs b/src/tools/clippy/clippy_lints/src/reference.rs
index a642e2da3..db870ec4c 100644
--- a/src/tools/clippy/clippy_lints/src/reference.rs
+++ b/src/tools/clippy/clippy_lints/src/reference.rs
@@ -94,7 +94,7 @@ impl EarlyLintPass for DerefAddrOf {
DEREF_ADDROF,
e.span,
"immediately dereferencing a reference",
- "try this",
+ "try",
sugg.to_string(),
applicability,
);
diff --git a/src/tools/clippy/clippy_lints/src/regex.rs b/src/tools/clippy/clippy_lints/src/regex.rs
index 674f8bf4c..b795e4b15 100644
--- a/src/tools/clippy/clippy_lints/src/regex.rs
+++ b/src/tools/clippy/clippy_lints/src/regex.rs
@@ -3,12 +3,12 @@ use std::fmt::Display;
use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
use clippy_utils::source::snippet_opt;
-use clippy_utils::{match_def_path, paths};
-use if_chain::if_chain;
+use clippy_utils::{def_path_def_ids, path_def_id, paths};
use rustc_ast::ast::{LitKind, StrStyle};
+use rustc_hir::def_id::DefIdMap;
use rustc_hir::{BorrowKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
-use rustc_session::{declare_lint_pass, declare_tool_lint};
+use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::{BytePos, Span};
declare_clippy_lint! {
@@ -55,26 +55,52 @@ declare_clippy_lint! {
"trivial regular expressions"
}
-declare_lint_pass!(Regex => [INVALID_REGEX, TRIVIAL_REGEX]);
+#[derive(Copy, Clone)]
+enum RegexKind {
+ Unicode,
+ UnicodeSet,
+ Bytes,
+ BytesSet,
+}
+
+#[derive(Default)]
+pub struct Regex {
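+ /// Constructor `DefId`s (e.g. `Regex::new`), resolved once in `check_crate` and
+ /// mapped to the kind of regex they build.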
+ definitions: DefIdMap<RegexKind>,
+}
+
+impl_lint_pass!(Regex => [INVALID_REGEX, TRIVIAL_REGEX]);
impl<'tcx> LateLintPass<'tcx> for Regex {
+ fn check_crate(&mut self, cx: &LateContext<'tcx>) {
+ // We don't use `match_def_path` here because that relies on matching the exact path, which changed
+ // between regex 1.8 and 1.9
+ //
+ // `def_path_def_ids` will resolve through re-exports but is relatively heavy, so we only perform
+ // the operation once and store the results
+ let mut resolve = |path, kind| {
+ for id in def_path_def_ids(cx, path) {
+ self.definitions.insert(id, kind);
+ }
+ };
+
+ resolve(&paths::REGEX_NEW, RegexKind::Unicode);
+ resolve(&paths::REGEX_BUILDER_NEW, RegexKind::Unicode);
+ resolve(&paths::REGEX_SET_NEW, RegexKind::UnicodeSet);
+ resolve(&paths::REGEX_BYTES_NEW, RegexKind::Bytes);
+ resolve(&paths::REGEX_BYTES_BUILDER_NEW, RegexKind::Bytes);
+ resolve(&paths::REGEX_BYTES_SET_NEW, RegexKind::BytesSet);
+ }
+
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- if_chain! {
- if let ExprKind::Call(fun, [arg]) = expr.kind;
- if let ExprKind::Path(ref qpath) = fun.kind;
- if let Some(def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id();
- then {
- if match_def_path(cx, def_id, &paths::REGEX_NEW) ||
- match_def_path(cx, def_id, &paths::REGEX_BUILDER_NEW) {
- check_regex(cx, arg, true);
- } else if match_def_path(cx, def_id, &paths::REGEX_BYTES_NEW) ||
- match_def_path(cx, def_id, &paths::REGEX_BYTES_BUILDER_NEW) {
- check_regex(cx, arg, false);
- } else if match_def_path(cx, def_id, &paths::REGEX_SET_NEW) {
- check_set(cx, arg, true);
- } else if match_def_path(cx, def_id, &paths::REGEX_BYTES_SET_NEW) {
- check_set(cx, arg, false);
- }
+ if let ExprKind::Call(fun, [arg]) = expr.kind
+ && let Some(def_id) = path_def_id(cx, fun)
+ && let Some(regex_kind) = self.definitions.get(&def_id)
+ {
+ match regex_kind {
+ RegexKind::Unicode => check_regex(cx, arg, true),
+ RegexKind::UnicodeSet => check_set(cx, arg, true),
+ RegexKind::Bytes => check_regex(cx, arg, false),
+ RegexKind::BytesSet => check_set(cx, arg, false),
}
}
}
diff --git a/src/tools/clippy/clippy_lints/src/renamed_lints.rs b/src/tools/clippy/clippy_lints/src/renamed_lints.rs
index 44e7cbfba..fc1fabcc0 100644
--- a/src/tools/clippy/clippy_lints/src/renamed_lints.rs
+++ b/src/tools/clippy/clippy_lints/src/renamed_lints.rs
@@ -30,6 +30,7 @@ pub static RENAMED_LINTS: &[(&str, &str)] = &[
("clippy::single_char_push_str", "clippy::single_char_add_str"),
("clippy::stutter", "clippy::module_name_repetitions"),
("clippy::to_string_in_display", "clippy::recursive_format_impl"),
+ ("clippy::unwrap_or_else_default", "clippy::unwrap_or_default"),
("clippy::zero_width_space", "clippy::invisible_characters"),
("clippy::cast_ref_to_mut", "invalid_reference_casting"),
("clippy::clone_double_ref", "suspicious_double_ref_op"),
@@ -42,6 +43,7 @@ pub static RENAMED_LINTS: &[(&str, &str)] = &[
("clippy::for_loops_over_fallibles", "for_loops_over_fallibles"),
("clippy::forget_copy", "forgetting_copy_types"),
("clippy::forget_ref", "forgetting_references"),
+ ("clippy::fn_null_check", "useless_ptr_null_checks"),
("clippy::into_iter_on_array", "array_into_iter"),
("clippy::invalid_atomic_ordering", "invalid_atomic_ordering"),
("clippy::invalid_ref", "invalid_value"),
diff --git a/src/tools/clippy/clippy_lints/src/returns.rs b/src/tools/clippy/clippy_lints/src/returns.rs
index 958351ad8..d6b9a49d2 100644
--- a/src/tools/clippy/clippy_lints/src/returns.rs
+++ b/src/tools/clippy/clippy_lints/src/returns.rs
@@ -1,15 +1,17 @@
-use clippy_utils::diagnostics::{span_lint_and_then, span_lint_hir_and_then};
+use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then, span_lint_hir_and_then};
use clippy_utils::source::{snippet_opt, snippet_with_context};
-use clippy_utils::visitors::{for_each_expr, Descend};
-use clippy_utils::{fn_def_id, path_to_local_id, span_find_starting_semi};
+use clippy_utils::visitors::{for_each_expr_with_closures, Descend};
+use clippy_utils::{fn_def_id, is_from_proc_macro, path_to_local_id, span_find_starting_semi};
use core::ops::ControlFlow;
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::FnKind;
-use rustc_hir::{Block, Body, Expr, ExprKind, FnDecl, LangItem, MatchSource, PatKind, QPath, StmtKind};
+use rustc_hir::{
+ Block, Body, Expr, ExprKind, FnDecl, ItemKind, LangItem, MatchSource, OwnerNode, PatKind, QPath, Stmt, StmtKind,
+};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
-use rustc_middle::ty::{self, subst::GenericArgKind, Ty};
+use rustc_middle::ty::{self, GenericArgKind, Ty};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::def_id::LocalDefId;
use rustc_span::source_map::Span;
@@ -76,6 +78,46 @@ declare_clippy_lint! {
"using a return statement like `return expr;` where an expression would suffice"
}
+declare_clippy_lint! {
+ /// ### What it does
+ /// Checks for return statements on `Err` paired with the `?` operator.
+ ///
+ /// ### Why is this bad?
+ /// The `return` is unnecessary.
+ ///
+ /// ### Example
+ /// ```rust,ignore
+ /// fn foo(x: usize) -> Result<(), Box<dyn Error>> {
+ /// if x == 0 {
+ /// return Err(...)?;
+ /// }
+ /// Ok(())
+ /// }
+ /// ```
+ /// simplify to
+ /// ```rust,ignore
+ /// fn foo(x: usize) -> Result<(), Box<dyn Error>> {
+ /// if x == 0 {
+ /// Err(...)?;
+ /// }
+ /// Ok(())
+ /// }
+ /// ```
+ /// if paired with `try_err`, use instead:
+ /// ```rust,ignore
+ /// fn foo(x: usize) -> Result<(), Box<dyn Error>> {
+ /// if x == 0 {
+ /// return Err(...);
+ /// }
+ /// Ok(())
+ /// }
+ /// ```
+ #[clippy::version = "1.73.0"]
+ pub NEEDLESS_RETURN_WITH_QUESTION_MARK,
+ style,
+ "using a return statement like `return Err(expr)?;` where removing it would suffice"
+}
+
#[derive(PartialEq, Eq)]
enum RetReplacement<'tcx> {
Empty,
@@ -115,9 +157,35 @@ impl<'tcx> ToString for RetReplacement<'tcx> {
}
}
-declare_lint_pass!(Return => [LET_AND_RETURN, NEEDLESS_RETURN]);
+declare_lint_pass!(Return => [LET_AND_RETURN, NEEDLESS_RETURN, NEEDLESS_RETURN_WITH_QUESTION_MARK]);
impl<'tcx> LateLintPass<'tcx> for Return {
+ fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
+ if !in_external_macro(cx.sess(), stmt.span)
+ && let StmtKind::Semi(expr) = stmt.kind
+ && let ExprKind::Ret(Some(ret)) = expr.kind
+ && let ExprKind::Match(.., MatchSource::TryDesugar(_)) = ret.kind
+ // Ensure this is not the final stmt, otherwise removing it would cause a compile error
+ && let OwnerNode::Item(item) = cx.tcx.hir().owner(cx.tcx.hir().get_parent_item(expr.hir_id))
+ && let ItemKind::Fn(_, _, body) = item.kind
+ && let block = cx.tcx.hir().body(body).value
+ && let ExprKind::Block(block, _) = block.kind
+ && let [.., final_stmt] = block.stmts
+ && final_stmt.hir_id != stmt.hir_id
+ && !is_from_proc_macro(cx, expr)
+ {
+ span_lint_and_sugg(
+ cx,
+ NEEDLESS_RETURN_WITH_QUESTION_MARK,
+ expr.span.until(ret.span),
+ "unneeded `return` statement with `?` operator",
+ "remove it",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx Block<'_>) {
// we need both a let-binding stmt and an expr
if_chain! {
@@ -173,6 +241,10 @@ impl<'tcx> LateLintPass<'tcx> for Return {
sp: Span,
_: LocalDefId,
) {
+ if sp.from_expansion() {
+ return;
+ }
+
match kind {
FnKind::Closure => {
// when returning without value in closure, replace this `return`
@@ -328,16 +400,16 @@ fn emit_return_lint(cx: &LateContext<'_>, ret_span: Span, semi_spans: Vec<Span>,
}
fn last_statement_borrows<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> bool {
- for_each_expr(expr, |e| {
+ for_each_expr_with_closures(cx, expr, |e| {
if let Some(def_id) = fn_def_id(cx, e)
&& cx
.tcx
.fn_sig(def_id)
- .subst_identity()
+ .instantiate_identity()
.skip_binder()
.output()
.walk()
- .any(|arg| matches!(arg.unpack(), GenericArgKind::Lifetime(_)))
+ .any(|arg| matches!(arg.unpack(), GenericArgKind::Lifetime(re) if !re.is_static()))
{
ControlFlow::Break(())
} else {
diff --git a/src/tools/clippy/clippy_lints/src/self_named_constructors.rs b/src/tools/clippy/clippy_lints/src/self_named_constructors.rs
index beca203c8..b92014f68 100644
--- a/src/tools/clippy/clippy_lints/src/self_named_constructors.rs
+++ b/src/tools/clippy/clippy_lints/src/self_named_constructors.rs
@@ -53,7 +53,7 @@ impl<'tcx> LateLintPass<'tcx> for SelfNamedConstructors {
let parent = cx.tcx.hir().get_parent_item(impl_item.hir_id()).def_id;
let item = cx.tcx.hir().expect_item(parent);
- let self_ty = cx.tcx.type_of(item.owner_id).subst_identity();
+ let self_ty = cx.tcx.type_of(item.owner_id).instantiate_identity();
let ret_ty = return_ty(cx, impl_item.owner_id);
// Do not check trait impls
diff --git a/src/tools/clippy/clippy_lints/src/semicolon_block.rs b/src/tools/clippy/clippy_lints/src/semicolon_block.rs
index 419d7991f..88f295c72 100644
--- a/src/tools/clippy/clippy_lints/src/semicolon_block.rs
+++ b/src/tools/clippy/clippy_lints/src/semicolon_block.rs
@@ -148,12 +148,18 @@ impl LateLintPass<'_> for SemicolonBlock {
expr: None,
stmts: [.., stmt],
..
- } = block else { return };
+ } = block
+ else {
+ return;
+ };
let &Stmt {
kind: StmtKind::Semi(expr),
span,
..
- } = stmt else { return };
+ } = stmt
+ else {
+ return;
+ };
self.semicolon_outside_block(cx, block, expr, span);
},
StmtKind::Semi(Expr {
diff --git a/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs b/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs
index 355f907e2..c9547cd95 100644
--- a/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs
+++ b/src/tools/clippy/clippy_lints/src/semicolon_if_nothing_returned.rs
@@ -43,7 +43,7 @@ impl<'tcx> LateLintPass<'tcx> for SemicolonIfNothingReturned {
if let Some(expr) = block.expr;
let t_expr = cx.typeck_results().expr_ty(expr);
if t_expr.is_unit();
- let mut app = Applicability::MaybeIncorrect;
+ let mut app = Applicability::MachineApplicable;
if let snippet = snippet_with_context(cx, expr.span, block.span.ctxt(), "}", &mut app).0;
if !snippet.ends_with('}') && !snippet.ends_with(';');
if cx.sess().source_map().is_multiline(block.span);
diff --git a/src/tools/clippy/clippy_lints/src/shadow.rs b/src/tools/clippy/clippy_lints/src/shadow.rs
index 993f9373d..78418b223 100644
--- a/src/tools/clippy/clippy_lints/src/shadow.rs
+++ b/src/tools/clippy/clippy_lints/src/shadow.rs
@@ -106,7 +106,9 @@ impl_lint_pass!(Shadow => [SHADOW_SAME, SHADOW_REUSE, SHADOW_UNRELATED]);
impl<'tcx> LateLintPass<'tcx> for Shadow {
fn check_pat(&mut self, cx: &LateContext<'tcx>, pat: &'tcx Pat<'_>) {
- let PatKind::Binding(_, id, ident, _) = pat.kind else { return };
+ let PatKind::Binding(_, id, ident, _) = pat.kind else {
+ return;
+ };
if pat.span.desugaring_kind().is_some() || pat.span.from_expansion() {
return;
diff --git a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs
index fffa8a380..4b248c9c7 100644
--- a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs
+++ b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs
@@ -1,18 +1,16 @@
-use clippy_utils::{
- diagnostics::span_lint_and_then,
- expr_or_init, get_attr, path_to_local,
- source::{indent_of, snippet},
-};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::source::{indent_of, snippet};
+use clippy_utils::{expr_or_init, get_attr, path_to_local, peel_hir_expr_unary};
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_errors::Applicability;
-use rustc_hir::{
- self as hir,
- intravisit::{walk_expr, Visitor},
-};
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::intravisit::{walk_expr, Visitor};
+use rustc_hir::{self as hir};
use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_middle::ty::{subst::GenericArgKind, Ty, TypeAndMut};
+use rustc_middle::ty::{GenericArgKind, Ty, TypeAndMut};
use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::{symbol::Ident, Span, DUMMY_SP};
+use rustc_span::symbol::Ident;
+use rustc_span::{sym, Span, DUMMY_SP};
use std::borrow::Cow;
declare_clippy_lint! {
@@ -237,7 +235,7 @@ impl<'ap, 'lc, 'others, 'stmt, 'tcx> StmtsChecker<'ap, 'lc, 'others, 'stmt, 'tcx
fn manage_has_expensive_expr_after_last_attr(&mut self) {
let has_expensive_stmt = match self.ap.curr_stmt.kind {
- hir::StmtKind::Expr(expr) if !is_expensive_expr(expr) => false,
+ hir::StmtKind::Expr(expr) if is_inexpensive_expr(expr) => false,
hir::StmtKind::Local(local) if let Some(expr) = local.init
&& let hir::ExprKind::Path(_) = expr.kind => false,
_ => true
@@ -332,13 +330,13 @@ impl<'ap, 'lc, 'others, 'stmt, 'tcx> Visitor<'tcx> for StmtsChecker<'ap, 'lc, 'o
apa.last_method_span = span;
}
},
- hir::StmtKind::Semi(expr) => {
- if has_drop(expr, &apa.first_bind_ident) {
+ hir::StmtKind::Semi(semi_expr) => {
+ if has_drop(semi_expr, &apa.first_bind_ident, self.cx) {
apa.has_expensive_expr_after_last_attr = false;
apa.last_stmt_span = DUMMY_SP;
return;
}
- if let hir::ExprKind::MethodCall(_, _, _, span) = expr.kind {
+ if let hir::ExprKind::MethodCall(_, _, _, span) = semi_expr.kind {
apa.last_method_span = span;
}
},
@@ -430,22 +428,37 @@ fn dummy_stmt_expr<'any>(expr: &'any hir::Expr<'any>) -> hir::Stmt<'any> {
}
}
-fn has_drop(expr: &hir::Expr<'_>, first_bind_ident: &Ident) -> bool {
+fn has_drop(expr: &hir::Expr<'_>, first_bind_ident: &Ident, lcx: &LateContext<'_>) -> bool {
if let hir::ExprKind::Call(fun, args) = expr.kind
&& let hir::ExprKind::Path(hir::QPath::Resolved(_, fun_path)) = &fun.kind
- && let [fun_ident, ..] = fun_path.segments
- && fun_ident.ident.name == rustc_span::sym::drop
+ && let Res::Def(DefKind::Fn, did) = fun_path.res
+ && lcx.tcx.is_diagnostic_item(sym::mem_drop, did)
&& let [first_arg, ..] = args
- && let hir::ExprKind::Path(hir::QPath::Resolved(_, arg_path)) = &first_arg.kind
- && let [first_arg_ps, .. ] = arg_path.segments
{
- &first_arg_ps.ident == first_bind_ident
- }
- else {
- false
+ let has_ident = |local_expr: &hir::Expr<'_>| {
+ if let hir::ExprKind::Path(hir::QPath::Resolved(_, arg_path)) = &local_expr.kind
+ && let [first_arg_ps, .. ] = arg_path.segments
+ && &first_arg_ps.ident == first_bind_ident
+ {
+ true
+ }
+ else {
+ false
+ }
+ };
+ if has_ident(first_arg) {
+ return true;
+ }
+ if let hir::ExprKind::Tup(value) = &first_arg.kind && value.iter().any(has_ident) {
+ return true;
+ }
}
+ false
}
-fn is_expensive_expr(expr: &hir::Expr<'_>) -> bool {
- !matches!(expr.kind, hir::ExprKind::Path(_))
+fn is_inexpensive_expr(expr: &hir::Expr<'_>) -> bool {
+ let actual = peel_hir_expr_unary(expr).0;
+ let is_path = matches!(actual.kind, hir::ExprKind::Path(_));
+ let is_lit = matches!(actual.kind, hir::ExprKind::Lit(_));
+ is_path || is_lit
}
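The reworked `has_drop` above resolves the callee's `DefId` against the `mem_drop` diagnostic item instead of matching the path segment name `drop`, and it additionally looks through a tuple argument. A small, hypothetical sketch of why name-based matching is fragile:

```rust
use std::sync::Mutex;

// An unrelated function that happens to be named `drop`; matching on the
// path segment name alone would conflate a call to it with `std::mem::drop`.
fn drop(_value: i32) {}

fn main() {
    let mutex = Mutex::new(0);
    let guard = mutex.lock().unwrap();
    // Resolving the callee's DefId identifies this as the real
    // `std::mem::drop`, which releases the lock here.
    std::mem::drop(guard);
    drop(42); // calls the local helper above, not `std::mem::drop`
}
```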
diff --git a/src/tools/clippy/clippy_lints/src/single_call_fn.rs b/src/tools/clippy/clippy_lints/src/single_call_fn.rs
index 42753d2e9..7bbe98e0a 100644
--- a/src/tools/clippy/clippy_lints/src/single_call_fn.rs
+++ b/src/tools/clippy/clippy_lints/src/single_call_fn.rs
@@ -2,8 +2,8 @@ use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::{is_from_proc_macro, is_in_test_function};
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def_id::LocalDefId;
-use rustc_hir::intravisit::{walk_expr, Visitor};
-use rustc_hir::{intravisit::FnKind, Body, Expr, ExprKind, FnDecl};
+use rustc_hir::intravisit::{walk_expr, FnKind, Visitor};
+use rustc_hir::{Body, Expr, ExprKind, FnDecl};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::nested_filter::OnlyBodies;
use rustc_middle::lint::in_external_macro;
diff --git a/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs b/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs
index 5743dd21c..9c21d70c8 100644
--- a/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs
+++ b/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs
@@ -1,11 +1,14 @@
use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg};
use rustc_ast::node_id::{NodeId, NodeMap};
+use rustc_ast::ptr::P;
use rustc_ast::visit::{walk_expr, Visitor};
-use rustc_ast::{ptr::P, Crate, Expr, ExprKind, Item, ItemKind, MacroDef, ModKind, Ty, TyKind, UseTreeKind};
+use rustc_ast::{Crate, Expr, ExprKind, Item, ItemKind, MacroDef, ModKind, Ty, TyKind, UseTreeKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::{edition::Edition, symbol::kw, Span, Symbol};
+use rustc_span::edition::Edition;
+use rustc_span::symbol::kw;
+use rustc_span::{Span, Symbol};
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs b/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs
index dfe8be7a6..321c89889 100644
--- a/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs
+++ b/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs
@@ -1,7 +1,9 @@
-use clippy_utils::{
- diagnostics::span_lint_and_then, get_trait_def_id, higher::VecArgs, macros::root_macro_call_first_node,
- source::snippet_opt, ty::implements_trait,
-};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::get_trait_def_id;
+use clippy_utils::higher::VecArgs;
+use clippy_utils::macros::root_macro_call_first_node;
+use clippy_utils::source::snippet_opt;
+use clippy_utils::ty::implements_trait;
use rustc_ast::{LitIntType, LitKind, UintTy};
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, LangItem, QPath};
diff --git a/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs b/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs
index ac4e29e9d..bd783b4e0 100644
--- a/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs
+++ b/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs
@@ -4,8 +4,7 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::{match_def_path, paths};
use if_chain::if_chain;
-use rustc_hir::BinOpKind;
-use rustc_hir::{Expr, ExprKind};
+use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, Ty, TypeAndMut};
use rustc_session::{declare_lint_pass, declare_tool_lint};
@@ -47,7 +46,7 @@ fn get_size_of_ty<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, inverted:
if let Some(def_id) = cx.qpath_res(count_func_qpath, count_func.hir_id).opt_def_id();
if matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::mem_size_of | sym::mem_size_of_val));
then {
- cx.typeck_results().node_substs(count_func.hir_id).types().next()
+ cx.typeck_results().node_args(count_func.hir_id).types().next()
} else {
None
}
@@ -101,7 +100,7 @@ fn get_pointee_ty_and_count_expr<'tcx>(
if FUNCTIONS.iter().any(|func_path| match_def_path(cx, def_id, func_path));
// Get the pointee type
- if let Some(pointee_ty) = cx.typeck_results().node_substs(func.hir_id).types().next();
+ if let Some(pointee_ty) = cx.typeck_results().node_args(func.hir_id).types().next();
then {
return Some((pointee_ty, count));
}
diff --git a/src/tools/clippy/clippy_lints/src/size_of_ref.rs b/src/tools/clippy/clippy_lints/src/size_of_ref.rs
index 8abec06c6..89ac8cd8c 100644
--- a/src/tools/clippy/clippy_lints/src/size_of_ref.rs
+++ b/src/tools/clippy/clippy_lints/src/size_of_ref.rs
@@ -1,4 +1,6 @@
-use clippy_utils::{diagnostics::span_lint_and_help, path_def_id, ty::peel_mid_ty_refs};
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::path_def_id;
+use clippy_utils::ty::peel_mid_ty_refs;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
diff --git a/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs b/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs
index 858135c8d..c9ab622ad 100644
--- a/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs
+++ b/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs
@@ -1,13 +1,13 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::sugg::Sugg;
-use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{
- get_enclosing_block, is_integer_literal, is_path_diagnostic_item, path_to_local, path_to_local_id, SpanlessEq,
+ get_enclosing_block, is_expr_path_def_path, is_integer_literal, is_path_diagnostic_item, path_to_local,
+ path_to_local_id, paths, SpanlessEq,
};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_block, walk_expr, walk_stmt, Visitor};
-use rustc_hir::{BindingAnnotation, Block, Expr, ExprKind, HirId, PatKind, QPath, Stmt, StmtKind};
+use rustc_hir::{BindingAnnotation, Block, Expr, ExprKind, HirId, PatKind, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::symbol::sym;
@@ -20,18 +20,27 @@ declare_clippy_lint! {
/// These structures are non-idiomatic and less efficient than simply using
/// `vec![0; len]`.
///
+ /// Specifically, for `vec![0; len]`, the compiler can use a specialized type of allocation
+ /// that also zero-initializes the allocated memory in the same call
+ /// (see: [alloc_zeroed](https://doc.rust-lang.org/stable/std/alloc/trait.GlobalAlloc.html#method.alloc_zeroed)).
+ ///
+ /// Writing `Vec::new()` followed by `vec.resize(len, 0)` is suboptimal because,
+ /// while it does do the same number of allocations,
+ /// it involves two operations for allocating and initializing.
+ /// The `resize` call first allocates memory (since `Vec::new()` did not), and only *then* zero-initializes it.
+ ///
/// ### Example
/// ```rust
/// # use core::iter::repeat;
/// # let len = 4;
- /// let mut vec1 = Vec::with_capacity(len);
+ /// let mut vec1 = Vec::new();
/// vec1.resize(len, 0);
///
- /// let mut vec1 = Vec::with_capacity(len);
- /// vec1.resize(vec1.capacity(), 0);
- ///
/// let mut vec2 = Vec::with_capacity(len);
- /// vec2.extend(repeat(0).take(len));
+ /// vec2.resize(len, 0);
+ ///
+ /// let mut vec3 = Vec::with_capacity(len);
+ /// vec3.extend(repeat(0).take(len));
/// ```
///
/// Use instead:
@@ -39,6 +48,7 @@ declare_clippy_lint! {
/// # let len = 4;
/// let mut vec1 = vec![0; len];
/// let mut vec2 = vec![0; len];
+ /// let mut vec3 = vec![0; len];
/// ```
#[clippy::version = "1.32.0"]
pub SLOW_VECTOR_INITIALIZATION,
@@ -60,7 +70,24 @@ struct VecAllocation<'tcx> {
/// Reference to the expression used as argument on `with_capacity` call. This is used
/// to only match slow zero-filling idioms of the same length than vector initialization.
- len_expr: &'tcx Expr<'tcx>,
+ size_expr: InitializedSize<'tcx>,
+}
+
+/// Initializer for the creation of the vector.
+///
+/// When `Vec::with_capacity(size)` is found, the `size` expression will be in
+/// `InitializedSize::Initialized`.
+///
+/// Otherwise, for `Vec::new()` calls, there is no allocation initializer yet, so
+/// `InitializedSize::Uninitialized` is used.
+/// Later, when a call to `.resize(size, 0)` or similar is found, it's set
+/// to `InitializedSize::Initialized(size)`.
+///
+/// Since it will be set to `InitializedSize::Initialized(size)` when a slow initialization is
+/// found, it is always safe to "unwrap" it at lint time.
+enum InitializedSize<'tcx> {
+ Initialized(&'tcx Expr<'tcx>),
+ Uninitialized,
}
/// Type of slow initialization
@@ -77,18 +104,14 @@ impl<'tcx> LateLintPass<'tcx> for SlowVectorInit {
// Matches initialization on reassignments. For example: `vec = Vec::with_capacity(100)`
if_chain! {
if let ExprKind::Assign(left, right, _) = expr.kind;
-
- // Extract variable
if let Some(local_id) = path_to_local(left);
-
- // Extract len argument
- if let Some(len_arg) = Self::is_vec_with_capacity(cx, right);
+ if let Some(size_expr) = Self::as_vec_initializer(cx, right);
then {
let vi = VecAllocation {
local_id,
allocation_expr: right,
- len_expr: len_arg,
+ size_expr,
};
Self::search_initialization(cx, vi, expr.hir_id);
@@ -98,17 +121,18 @@ impl<'tcx> LateLintPass<'tcx> for SlowVectorInit {
fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
// Matches statements which initializes vectors. For example: `let mut vec = Vec::with_capacity(10)`
+ // or `Vec::new()`
if_chain! {
if let StmtKind::Local(local) = stmt.kind;
if let PatKind::Binding(BindingAnnotation::MUT, local_id, _, None) = local.pat.kind;
if let Some(init) = local.init;
- if let Some(len_arg) = Self::is_vec_with_capacity(cx, init);
+ if let Some(size_expr) = Self::as_vec_initializer(cx, init);
then {
let vi = VecAllocation {
local_id,
allocation_expr: init,
- len_expr: len_arg,
+ size_expr,
};
Self::search_initialization(cx, vi, stmt.hir_id);
@@ -118,19 +142,20 @@ impl<'tcx> LateLintPass<'tcx> for SlowVectorInit {
}
impl SlowVectorInit {
- /// Checks if the given expression is `Vec::with_capacity(..)`. It will return the expression
- /// of the first argument of `with_capacity` call if it matches or `None` if it does not.
- fn is_vec_with_capacity<'tcx>(cx: &LateContext<'_>, expr: &Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> {
- if_chain! {
- if let ExprKind::Call(func, [arg]) = expr.kind;
- if let ExprKind::Path(QPath::TypeRelative(ty, name)) = func.kind;
- if name.ident.as_str() == "with_capacity";
- if is_type_diagnostic_item(cx, cx.typeck_results().node_type(ty.hir_id), sym::Vec);
- then {
- Some(arg)
- } else {
- None
- }
+ /// Looks for `Vec::with_capacity(size)` or `Vec::new()` calls and returns the initialized size,
+ /// if any. More specifically, it returns:
+ /// - `Some(InitializedSize::Initialized(size))` for `Vec::with_capacity(size)`
+ /// - `Some(InitializedSize::Uninitialized)` for `Vec::new()`
+ /// - `None` for other, unrelated kinds of expressions
+ fn as_vec_initializer<'tcx>(cx: &LateContext<'_>, expr: &'tcx Expr<'tcx>) -> Option<InitializedSize<'tcx>> {
+ if let ExprKind::Call(func, [len_expr]) = expr.kind
+ && is_expr_path_def_path(cx, func, &paths::VEC_WITH_CAPACITY)
+ {
+ Some(InitializedSize::Initialized(len_expr))
+ } else if matches!(expr.kind, ExprKind::Call(func, _) if is_expr_path_def_path(cx, func, &paths::VEC_NEW)) {
+ Some(InitializedSize::Uninitialized)
+ } else {
+ None
}
}
@@ -169,12 +194,19 @@ impl SlowVectorInit {
}
fn emit_lint(cx: &LateContext<'_>, slow_fill: &Expr<'_>, vec_alloc: &VecAllocation<'_>, msg: &str) {
- let len_expr = Sugg::hir(cx, vec_alloc.len_expr, "len");
+ let len_expr = Sugg::hir(
+ cx,
+ match vec_alloc.size_expr {
+ InitializedSize::Initialized(expr) => expr,
+ InitializedSize::Uninitialized => unreachable!("size expression must be set by this point"),
+ },
+ "len",
+ );
span_lint_and_then(cx, SLOW_VECTOR_INITIALIZATION, slow_fill.span, msg, |diag| {
diag.span_suggestion(
vec_alloc.allocation_expr.span,
- "consider replace allocation with",
+ "consider replacing this with",
format!("vec![0; {len_expr}]"),
Applicability::Unspecified,
);
@@ -214,36 +246,45 @@ impl<'a, 'tcx> VectorInitializationVisitor<'a, 'tcx> {
}
/// Checks if the given expression is resizing a vector with 0
- fn search_slow_resize_filling(&mut self, expr: &'tcx Expr<'_>) {
+ fn search_slow_resize_filling(&mut self, expr: &'tcx Expr<'tcx>) {
if self.initialization_found
&& let ExprKind::MethodCall(path, self_arg, [len_arg, fill_arg], _) = expr.kind
&& path_to_local_id(self_arg, self.vec_alloc.local_id)
&& path.ident.name == sym!(resize)
// Check that is filled with 0
- && is_integer_literal(fill_arg, 0) {
- // Check that len expression is equals to `with_capacity` expression
- if SpanlessEq::new(self.cx).eq_expr(len_arg, self.vec_alloc.len_expr) {
- self.slow_expression = Some(InitializationType::Resize(expr));
- } else if let ExprKind::MethodCall(path, ..) = len_arg.kind && path.ident.as_str() == "capacity" {
- self.slow_expression = Some(InitializationType::Resize(expr));
- }
+ && is_integer_literal(fill_arg, 0)
+ {
+ let is_matching_resize = if let InitializedSize::Initialized(size_expr) = self.vec_alloc.size_expr {
+ // If we have a size expression, check that it is equal to what's passed to `resize`
+ SpanlessEq::new(self.cx).eq_expr(len_arg, size_expr)
+ || matches!(len_arg.kind, ExprKind::MethodCall(path, ..) if path.ident.as_str() == "capacity")
+ } else {
+ self.vec_alloc.size_expr = InitializedSize::Initialized(len_arg);
+ true
+ };
+
+ if is_matching_resize {
+ self.slow_expression = Some(InitializationType::Resize(expr));
}
+ }
}
/// Returns `true` if give expression is `repeat(0).take(...)`
- fn is_repeat_take(&self, expr: &Expr<'_>) -> bool {
+ fn is_repeat_take(&mut self, expr: &'tcx Expr<'tcx>) -> bool {
if_chain! {
if let ExprKind::MethodCall(take_path, recv, [len_arg, ..], _) = expr.kind;
if take_path.ident.name == sym!(take);
// Check that take is applied to `repeat(0)`
if self.is_repeat_zero(recv);
then {
- // Check that len expression is equals to `with_capacity` expression
- if SpanlessEq::new(self.cx).eq_expr(len_arg, self.vec_alloc.len_expr) {
- return true;
- } else if let ExprKind::MethodCall(path, ..) = len_arg.kind && path.ident.as_str() == "capacity" {
- return true;
+ if let InitializedSize::Initialized(size_expr) = self.vec_alloc.size_expr {
+ // Check that len expression is equals to `with_capacity` expression
+ return SpanlessEq::new(self.cx).eq_expr(len_arg, size_expr)
+ || matches!(len_arg.kind, ExprKind::MethodCall(path, ..) if path.ident.as_str() == "capacity")
}
+
+ self.vec_alloc.size_expr = InitializedSize::Initialized(len_arg);
+ return true;
}
}
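The rework above extends `slow_vector_initialization` to also cover `Vec::new()` followed by a zero-filling `resize`, tracking the eventual length through the new `InitializedSize` enum. A minimal illustration of the pattern it flags and the suggested replacement:

```rust
fn make_buffers(len: usize) -> (Vec<u8>, Vec<u8>) {
    // Flagged: `Vec::new()` allocates nothing, so the later `resize` must
    // allocate and then zero the memory as two separate steps.
    let mut slow = Vec::new();
    slow.resize(len, 0);

    // Suggested replacement: `vec![0; len]` can get already-zeroed memory
    // from the allocator in a single call.
    let fast = vec![0u8; len];

    (slow, fast)
}
```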
diff --git a/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs
index a13bc7a51..f23916527 100644
--- a/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs
+++ b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs
@@ -1,9 +1,11 @@
use clippy_utils::diagnostics::span_lint_and_help;
+use rustc_hir::def::Res;
use rustc_hir::def_id::DefId;
-use rustc_hir::{def::Res, HirId, Path, PathSegment};
+use rustc_hir::{HirId, Path, PathSegment};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
-use rustc_span::{sym, symbol::kw, Span};
+use rustc_span::symbol::kw;
+use rustc_span::{sym, Span};
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/strings.rs b/src/tools/clippy/clippy_lints/src/strings.rs
index 8658009eb..76f463fff 100644
--- a/src/tools/clippy/clippy_lints/src/strings.rs
+++ b/src/tools/clippy/clippy_lints/src/strings.rs
@@ -1,8 +1,10 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_sugg};
use clippy_utils::source::{snippet, snippet_with_applicability};
use clippy_utils::ty::is_type_lang_item;
-use clippy_utils::{get_expr_use_or_unification_node, peel_blocks, SpanlessEq};
-use clippy_utils::{get_parent_expr, is_lint_allowed, is_path_diagnostic_item, method_calls};
+use clippy_utils::{
+ get_expr_use_or_unification_node, get_parent_expr, is_lint_allowed, is_path_diagnostic_item, method_calls,
+ peel_blocks, SpanlessEq,
+};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def_id::DefId;
@@ -188,7 +190,7 @@ impl<'tcx> LateLintPass<'tcx> for StringAdd {
);
}
},
- ExprKind::Index(target, _idx) => {
+ ExprKind::Index(target, _idx, _) => {
let e_ty = cx.typeck_results().expr_ty(target).peel_refs();
if e_ty.is_str() || is_type_lang_item(cx, e_ty, LangItem::String) {
span_lint(
@@ -260,7 +262,7 @@ impl<'tcx> LateLintPass<'tcx> for StringLitAsBytes {
// Find string::as_bytes
if let ExprKind::AddrOf(BorrowKind::Ref, _, args) = args[0].kind;
- if let ExprKind::Index(left, right) = args.kind;
+ if let ExprKind::Index(left, right, _) = args.kind;
let (method_names, expressions, _) = method_calls(left, 1);
if method_names.len() == 1;
if expressions.len() == 1;
@@ -326,7 +328,7 @@ impl<'tcx> LateLintPass<'tcx> for StringLitAsBytes {
{
// Don't lint. Byte strings produce `&[u8; N]` whereas `as_bytes()` produces
// `&[u8]`. This change would prevent matching with different sized slices.
- } else {
+ } else if !callsite.starts_with("env!") {
span_lint_and_sugg(
cx,
STRING_LIT_AS_BYTES,
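The new `env!` guard in `string_lit_as_bytes` above skips call sites whose snippet starts with `env!`, presumably because a value produced by that macro has no byte-string-literal spelling to suggest. A hypothetical example (assuming a Cargo build, which sets `CARGO_PKG_NAME`):

```rust
fn main() {
    // No longer linted: the string comes from `env!`, so it cannot be
    // rewritten as a `b"..."` literal.
    let name = env!("CARGO_PKG_NAME").as_bytes();

    // Still linted: a plain literal has the simpler spelling `b"hello"`.
    let greeting = "hello".as_bytes();

    println!("{} {}", name.len(), greeting.len());
}
```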
diff --git a/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs b/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs
index 2f2e84fa3..b3db5e9a4 100644
--- a/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs
+++ b/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs
@@ -78,7 +78,7 @@ impl<'tcx> LateLintPass<'tcx> for StrlenOnCStrings {
STRLEN_ON_C_STRINGS,
span,
"using `libc::strlen` on a `CString` or `CStr` value",
- "try this",
+ "try",
format!("{val_name}.{method_name}().len()"),
app,
);
diff --git a/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs b/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs
index e5746ca99..8be4ec3dc 100644
--- a/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs
+++ b/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs
@@ -1,6 +1,7 @@
use clippy_utils::diagnostics::{multispan_sugg_with_applicability, span_lint_and_then};
use if_chain::if_chain;
-use rustc_ast::{token::CommentKind, AttrKind, AttrStyle, Attribute, Item};
+use rustc_ast::token::CommentKind;
+use rustc_ast::{AttrKind, AttrStyle, Attribute, Item};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
diff --git a/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs b/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs
index e2cdc48b5..23d6e2a84 100644
--- a/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs
+++ b/src/tools/clippy/clippy_lints/src/suspicious_operation_groupings.rs
@@ -572,7 +572,7 @@ fn ident_difference_expr_with_base_location(
| (AddrOf(_, _, _), AddrOf(_, _, _))
| (Path(_, _), Path(_, _))
| (Range(_, _, _), Range(_, _, _))
- | (Index(_, _), Index(_, _))
+ | (Index(_, _, _), Index(_, _, _))
| (Field(_, _), Field(_, _))
| (AssignOp(_, _, _), AssignOp(_, _, _))
| (Assign(_, _, _), Assign(_, _, _))
diff --git a/src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs b/src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs
index 9c0dc8096..8e156b882 100644
--- a/src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs
+++ b/src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs
@@ -1,4 +1,7 @@
-use clippy_utils::{numeric_literal::NumericLiteral, source::snippet_with_context};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::numeric_literal::NumericLiteral;
+use clippy_utils::source::snippet;
+use rustc_ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
@@ -27,27 +30,29 @@ declare_lint_pass!(ConfusingXorAndPow => [SUSPICIOUS_XOR_USED_AS_POW]);
impl LateLintPass<'_> for ConfusingXorAndPow {
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
- if !in_external_macro(cx.sess(), expr.span) &&
- let ExprKind::Binary(op, left, right) = &expr.kind &&
- op.node == BinOpKind::BitXor &&
- left.span.ctxt() == right.span.ctxt() &&
- let ExprKind::Lit(lit_left) = &left.kind &&
- let ExprKind::Lit(lit_right) = &right.kind &&
- let snip_left = snippet_with_context(cx, lit_left.span, lit_left.span.ctxt(), "..", &mut Applicability::MaybeIncorrect) &&
- let snip_right = snippet_with_context(cx, lit_right.span, lit_right.span.ctxt(), "..", &mut Applicability::MaybeIncorrect) &&
- let Some(left_val) = NumericLiteral::from_lit_kind(&snip_left.0, &lit_left.node) &&
- let Some(right_val) = NumericLiteral::from_lit_kind(&snip_right.0, &lit_right.node) &&
- left_val.is_decimal() &&
- right_val.is_decimal() {
- clippy_utils::diagnostics::span_lint_and_sugg(
- cx,
- SUSPICIOUS_XOR_USED_AS_POW,
- expr.span,
- "`^` is not the exponentiation operator",
- "did you mean to write",
- format!("{}.pow({})", left_val.format(), right_val.format()),
- Applicability::MaybeIncorrect,
- );
+ if !in_external_macro(cx.sess(), expr.span)
+ && let ExprKind::Binary(op, left, right) = &expr.kind
+ && op.node == BinOpKind::BitXor
+ && left.span.ctxt() == right.span.ctxt()
+ && let ExprKind::Lit(lit_left) = &left.kind
+ && let ExprKind::Lit(lit_right) = &right.kind
+ && matches!(lit_right.node, LitKind::Int(..) | LitKind::Float(..))
+ && matches!(lit_left.node, LitKind::Int(..) | LitKind::Float(..))
+ && NumericLiteral::from_lit_kind(&snippet(cx, lit_right.span, ".."), &lit_right.node).is_some_and(|x| x.is_decimal())
+ {
+ span_lint_and_sugg(
+ cx,
+ SUSPICIOUS_XOR_USED_AS_POW,
+ expr.span,
+ "`^` is not the exponentiation operator",
+ "did you mean to write",
+ format!(
+ "{}.pow({})",
+ lit_left.node,
+ lit_right.node
+ ),
+ Applicability::MaybeIncorrect,
+ );
}
}
}
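For reference, the rewritten check above still targets the same confusion: `^` in Rust is bitwise XOR, not exponentiation, so `2 ^ 8` silently computes 10 rather than 256. A short demonstration:

```rust
fn main() {
    let xor = 2 ^ 8;       // bitwise XOR: 0b0010 ^ 0b1000 == 0b1010 == 10
    let pow = 2i32.pow(8); // exponentiation, usually what `2 ^ 8` intends: 256
    assert_eq!(xor, 10);
    assert_eq!(pow, 256);
    println!("{xor} {pow}");
}
```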
diff --git a/src/tools/clippy/clippy_lints/src/swap.rs b/src/tools/clippy/clippy_lints/src/swap.rs
index f7eef03d1..548fabb8b 100644
--- a/src/tools/clippy/clippy_lints/src/swap.rs
+++ b/src/tools/clippy/clippy_lints/src/swap.rs
@@ -11,8 +11,8 @@ use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Spanned;
-use rustc_span::SyntaxContext;
-use rustc_span::{sym, symbol::Ident, Span};
+use rustc_span::symbol::Ident;
+use rustc_span::{sym, Span, SyntaxContext};
declare_clippy_lint! {
/// ### What it does
@@ -86,8 +86,8 @@ fn generate_swap_warning(cx: &LateContext<'_>, e1: &Expr<'_>, e2: &Expr<'_>, spa
let mut applicability = Applicability::MachineApplicable;
if !can_mut_borrow_both(cx, e1, e2) {
- if let ExprKind::Index(lhs1, idx1) = e1.kind
- && let ExprKind::Index(lhs2, idx2) = e2.kind
+ if let ExprKind::Index(lhs1, idx1, _) = e1.kind
+ && let ExprKind::Index(lhs2, idx2, _) = e2.kind
&& eq_expr_value(cx, lhs1, lhs2)
&& e1.span.ctxt() == ctxt
&& e2.span.ctxt() == ctxt
diff --git a/src/tools/clippy/clippy_lints/src/temporary_assignment.rs b/src/tools/clippy/clippy_lints/src/temporary_assignment.rs
index 3766b8f8e..b6b653f66 100644
--- a/src/tools/clippy/clippy_lints/src/temporary_assignment.rs
+++ b/src/tools/clippy/clippy_lints/src/temporary_assignment.rs
@@ -33,7 +33,7 @@ impl<'tcx> LateLintPass<'tcx> for TemporaryAssignment {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if let ExprKind::Assign(target, ..) = &expr.kind {
let mut base = target;
- while let ExprKind::Field(f, _) | ExprKind::Index(f, _) = &base.kind {
+ while let ExprKind::Field(f, _) | ExprKind::Index(f, _, _) = &base.kind {
base = f;
}
if is_temporary(base) && !is_adjusted(cx, base) {
diff --git a/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs b/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs
index 0a0a77082..b356666d8 100644
--- a/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs
+++ b/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs
@@ -1,8 +1,11 @@
-use clippy_utils::{diagnostics::span_lint_and_note, is_in_cfg_test, is_in_test_function};
-use rustc_hir::{intravisit::FnKind, Body, FnDecl};
+use clippy_utils::diagnostics::span_lint_and_note;
+use clippy_utils::{is_in_cfg_test, is_in_test_function};
+use rustc_hir::intravisit::FnKind;
+use rustc_hir::{Body, FnDecl};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::{def_id::LocalDefId, Span};
+use rustc_span::def_id::LocalDefId;
+use rustc_span::Span;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs b/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs
index 2512500a6..f1b703fde 100644
--- a/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs
+++ b/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs
@@ -82,7 +82,7 @@ impl<'tcx> LateLintPass<'tcx> for ToDigitIsSome {
TO_DIGIT_IS_SOME,
expr.span,
"use of `.to_digit(..).is_some()`",
- "try this",
+ "try",
if is_method_call {
format!("{char_arg_snip}.is_digit({radix_snip})")
} else {
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_non_zero.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_non_zero.rs
index 550365325..c0d0d2b93 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_non_zero.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_non_zero.rs
@@ -4,10 +4,8 @@ use clippy_utils::sugg;
use rustc_errors::Applicability;
use rustc_hir::Expr;
use rustc_lint::LateContext;
-use rustc_middle::{
- query::Key,
- ty::{self, Ty},
-};
+use rustc_middle::query::Key;
+use rustc_middle::ty::{self, Ty};
use rustc_span::symbol::sym;
/// Checks for `transmute_int_to_non_zero` lint.
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs
index 857d2ad82..4ae4359ee 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs
@@ -24,7 +24,7 @@ pub(super) fn check<'tcx>(
"transmute from a pointer to a pointer",
|diag| {
if let Some(arg) = sugg::Sugg::hir_opt(cx, arg) {
- let sugg = arg.as_ty(Ty::new_ptr(cx.tcx,*to_ty));
+ let sugg = arg.as_ty(Ty::new_ptr(cx.tcx, *to_ty));
diag.span_suggestion(e.span, "try", sugg, Applicability::Unspecified);
}
},
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs
index 5e24213d0..c61eb0a93 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs
@@ -3,8 +3,7 @@ use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::ty::is_c_void;
use rustc_hir::Expr;
use rustc_lint::LateContext;
-use rustc_middle::ty::SubstsRef;
-use rustc_middle::ty::{self, IntTy, Ty, TypeAndMut, UintTy};
+use rustc_middle::ty::{self, GenericArgsRef, IntTy, Ty, TypeAndMut, UintTy};
#[expect(clippy::too_many_lines)]
pub(super) fn check<'tcx>(
@@ -268,12 +267,12 @@ fn reduce_ty<'tcx>(cx: &LateContext<'tcx>, mut ty: Ty<'tcx>) -> ReducedTy<'tcx>
}
ReducedTy::UnorderedFields(ty)
},
- ty::Adt(def, substs) if def.is_struct() => {
+ ty::Adt(def, args) if def.is_struct() => {
let mut iter = def
.non_enum_variant()
.fields
.iter()
- .map(|f| cx.tcx.type_of(f.did).subst(cx.tcx, substs));
+ .map(|f| cx.tcx.type_of(f.did).instantiate(cx.tcx, args));
let Some(sized_ty) = iter.find(|&ty| !is_zero_sized_ty(cx, ty)) else {
return ReducedTy::TypeErasure { raw_ptr_only: false };
};
@@ -322,7 +321,7 @@ fn is_size_pair(ty: Ty<'_>) -> bool {
}
}
-fn same_except_params<'tcx>(subs1: SubstsRef<'tcx>, subs2: SubstsRef<'tcx>) -> bool {
+fn same_except_params<'tcx>(subs1: GenericArgsRef<'tcx>, subs2: GenericArgsRef<'tcx>) -> bool {
// TODO: check const parameters as well. Currently this will consider `Array<5>` the same as
// `Array<6>`
for (ty1, ty2) in subs1.types().zip(subs2.types()).filter(|(ty1, ty2)| ty1 != ty2) {
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs b/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs
index 85cd74f23..513a913f5 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs
@@ -6,7 +6,8 @@ use rustc_ast::ExprPrecedence;
use rustc_errors::Applicability;
use rustc_hir::{Expr, Node};
use rustc_lint::LateContext;
-use rustc_middle::ty::{cast::CastKind, Ty};
+use rustc_middle::ty::cast::CastKind;
+use rustc_middle::ty::Ty;
/// Checks for `transmutes_expressible_as_ptr_casts` lint.
/// Returns `true` if it's triggered, otherwise returns `false`.
diff --git a/src/tools/clippy/clippy_lints/src/transmute/unsound_collection_transmute.rs b/src/tools/clippy/clippy_lints/src/transmute/unsound_collection_transmute.rs
index b1445311b..891fefc17 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/unsound_collection_transmute.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/unsound_collection_transmute.rs
@@ -10,7 +10,7 @@ use rustc_span::symbol::sym;
/// Returns `true` if it's triggered, otherwise returns `false`.
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, from_ty: Ty<'tcx>, to_ty: Ty<'tcx>) -> bool {
match (&from_ty.kind(), &to_ty.kind()) {
- (ty::Adt(from_adt, from_substs), ty::Adt(to_adt, to_substs)) => {
+ (ty::Adt(from_adt, from_args), ty::Adt(to_adt, to_args)) => {
if from_adt.did() != to_adt.did() {
return false;
}
@@ -28,9 +28,9 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, from_ty: Ty
) {
return false;
}
- if from_substs
+ if from_args
.types()
- .zip(to_substs.types())
+ .zip(to_args.types())
.any(|(from_ty, to_ty)| is_layout_incompatible(cx, from_ty, to_ty))
{
span_lint(
diff --git a/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs b/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs
index b6615410e..088c8fda8 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs
@@ -43,7 +43,7 @@ pub(super) fn check<'tcx>(
let sugg = if *ptr_ty == rty_and_mut {
arg.as_ty(to_ty)
} else {
- arg.as_ty(Ty::new_ptr(cx.tcx,rty_and_mut)).as_ty(to_ty)
+ arg.as_ty(Ty::new_ptr(cx.tcx, rty_and_mut)).as_ty(to_ty)
};
diag.span_suggestion(e.span, "try", sugg, Applicability::Unspecified);
diff --git a/src/tools/clippy/clippy_lints/src/transmute/utils.rs b/src/tools/clippy/clippy_lints/src/transmute/utils.rs
index 62efd13b8..1cf6cf854 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/utils.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/utils.rs
@@ -2,7 +2,8 @@ use rustc_hir as hir;
use rustc_hir::Expr;
use rustc_hir_typeck::{cast, FnCtxt, Inherited};
use rustc_lint::LateContext;
-use rustc_middle::ty::{cast::CastKind, Ty};
+use rustc_middle::ty::cast::CastKind;
+use rustc_middle::ty::Ty;
use rustc_span::DUMMY_SP;
// check if the component types of the transmuted collection and the result have different ABI,
diff --git a/src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs b/src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs
index 90eb45a09..78ad52d8a 100644
--- a/src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs
+++ b/src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs
@@ -1,23 +1,29 @@
-use clippy_utils::{
- diagnostics::span_lint_and_help,
- is_from_proc_macro,
- msrvs::{self, Msrv},
- path_to_local,
-};
+use clippy_utils::diagnostics::span_lint_and_help;
+use clippy_utils::msrvs::{self, Msrv};
+use clippy_utils::visitors::for_each_local_use_after_expr;
+use clippy_utils::{is_from_proc_macro, path_to_local};
+use itertools::Itertools;
use rustc_ast::LitKind;
-use rustc_hir::{Expr, ExprKind, HirId, Node, Pat};
+use rustc_hir::{Expr, ExprKind, Node, PatKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_middle::{lint::in_external_macro, ty};
+use rustc_middle::lint::in_external_macro;
+use rustc_middle::ty::{self, Ty};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use std::iter::once;
+use std::ops::ControlFlow;
declare_clippy_lint! {
/// ### What it does
/// Checks for tuple<=>array conversions that are not done with `.into()`.
///
/// ### Why is this bad?
- /// It's unnecessary complexity. `.into()` works for tuples<=>arrays at or below 12 elements and
- /// conveys the intent a lot better, while also leaving less room for hard to spot bugs!
+ /// It may be unnecessary complexity. `.into()` works for converting tuples<=> arrays of up to
+ /// 12 elements and conveys the intent more clearly, while also leaving less room for hard to
+ /// spot bugs!
+ ///
+ /// ### Known issues
+ /// The suggested code may hide potential asymmetry in some cases. See
+ /// [#11085](https://github.com/rust-lang/rust-clippy/issues/11085) for more info.
///
/// ### Example
/// ```rust,ignore
@@ -43,130 +49,152 @@ pub struct TupleArrayConversions {
impl LateLintPass<'_> for TupleArrayConversions {
fn check_expr<'tcx>(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
- if !in_external_macro(cx.sess(), expr.span) && self.msrv.meets(msrvs::TUPLE_ARRAY_CONVERSIONS) {
- match expr.kind {
- ExprKind::Array(elements) if (1..=12).contains(&elements.len()) => check_array(cx, expr, elements),
- ExprKind::Tup(elements) if (1..=12).contains(&elements.len()) => check_tuple(cx, expr, elements),
- _ => {},
- }
+ if in_external_macro(cx.sess(), expr.span) || !self.msrv.meets(msrvs::TUPLE_ARRAY_CONVERSIONS) {
+ return;
+ }
+
+ match expr.kind {
+ ExprKind::Array(elements) if (1..=12).contains(&elements.len()) => check_array(cx, expr, elements),
+ ExprKind::Tup(elements) if (1..=12).contains(&elements.len()) => check_tuple(cx, expr, elements),
+ _ => {},
}
}
extract_msrv_attr!(LateContext);
}
-#[expect(
- clippy::blocks_in_if_conditions,
- reason = "not a FP, but this is much easier to understand"
-)]
fn check_array<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, elements: &'tcx [Expr<'tcx>]) {
- if should_lint(
- cx,
- elements,
- // This is cursed.
- Some,
- |(first_id, local)| {
- if let Node::Pat(pat) = local
- && let parent = parent_pat(cx, pat)
- && parent.hir_id == first_id
- {
- return matches!(
- cx.typeck_results().pat_ty(parent).peel_refs().kind(),
- ty::Tuple(len) if len.len() == elements.len()
- );
- }
-
- false
- },
- ) || should_lint(
- cx,
- elements,
- |(i, expr)| {
- if let ExprKind::Field(path, field) = expr.kind && field.as_str() == i.to_string() {
- return Some((i, path));
- };
-
- None
- },
- |(first_id, local)| {
- if let Node::Pat(pat) = local
- && let parent = parent_pat(cx, pat)
- && parent.hir_id == first_id
- {
- return matches!(
- cx.typeck_results().pat_ty(parent).peel_refs().kind(),
- ty::Tuple(len) if len.len() == elements.len()
- );
- }
+ let (ty::Array(ty, _) | ty::Slice(ty)) = cx.typeck_results().expr_ty(expr).kind() else {
+ unreachable!("`expr` must be an array or slice due to `ExprKind::Array`");
+ };
+
+ if let [first, ..] = elements
+ && let Some(locals) = (match first.kind {
+ ExprKind::Field(_, _) => elements
+ .iter()
+ .enumerate()
+ .map(|(i, f)| -> Option<&'tcx Expr<'tcx>> {
+ let ExprKind::Field(lhs, ident) = f.kind else {
+ return None;
+ };
+ (ident.name.as_str() == i.to_string()).then_some(lhs)
+ })
+ .collect::<Option<Vec<_>>>(),
+ ExprKind::Path(_) => Some(elements.iter().collect()),
+ _ => None,
+ })
+ && all_bindings_are_for_conv(cx, &[*ty], expr, elements, &locals, ToType::Array)
+ && !is_from_proc_macro(cx, expr)
+ {
+ span_lint_and_help(
+ cx,
+ TUPLE_ARRAY_CONVERSIONS,
+ expr.span,
+ "it looks like you're trying to convert a tuple to an array",
+ None,
+ "use `.into()` instead, or `<[T; N]>::from` if type annotations are needed",
+ );
+ }
+}
- false
- },
- ) {
- emit_lint(cx, expr, ToType::Array);
+fn check_tuple<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, elements: &'tcx [Expr<'tcx>]) {
+ if let ty::Tuple(tys) = cx.typeck_results().expr_ty(expr).kind()
+ && let [first, ..] = elements
+ // Fix #11100
+ && tys.iter().all_equal()
+ && let Some(locals) = (match first.kind {
+ ExprKind::Index(..) => elements
+ .iter()
+ .enumerate()
+ .map(|(i, i_expr)| -> Option<&'tcx Expr<'tcx>> {
+ if let ExprKind::Index(lhs, index, _) = i_expr.kind
+ && let ExprKind::Lit(lit) = index.kind
+ && let LitKind::Int(val, _) = lit.node
+ {
+ return (val == i as u128).then_some(lhs);
+ };
+
+ None
+ })
+ .collect::<Option<Vec<_>>>(),
+ ExprKind::Path(_) => Some(elements.iter().collect()),
+ _ => None,
+ })
+ && all_bindings_are_for_conv(cx, tys, expr, elements, &locals, ToType::Tuple)
+ && !is_from_proc_macro(cx, expr)
+ {
+ span_lint_and_help(
+ cx,
+ TUPLE_ARRAY_CONVERSIONS,
+ expr.span,
+ "it looks like you're trying to convert an array to a tuple",
+ None,
+ "use `.into()` instead, or `<(T0, T1, ..., Tn)>::from` if type annotations are needed",
+ );
}
}
-#[expect(
- clippy::blocks_in_if_conditions,
- reason = "not a FP, but this is much easier to understand"
-)]
+/// Checks that every binding in `elements` comes from the same parent `Pat` with the kind if there
+/// is a parent `Pat`. Returns false in any of the following cases:
+/// * `kind` does not match `pat.kind`
+/// * one or more elements in `elements` is not a binding
+/// * one or more bindings does not have the same parent `Pat`
+/// * one or more bindings are used after `expr`
+/// * the bindings do not all have the same type
#[expect(clippy::cast_possible_truncation)]
-fn check_tuple<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, elements: &'tcx [Expr<'tcx>]) {
- if should_lint(cx, elements, Some, |(first_id, local)| {
- if let Node::Pat(pat) = local
- && let parent = parent_pat(cx, pat)
- && parent.hir_id == first_id
- {
- return matches!(
- cx.typeck_results().pat_ty(parent).peel_refs().kind(),
- ty::Array(_, len) if len.eval_target_usize(cx.tcx, cx.param_env) as usize == elements.len()
- );
+fn all_bindings_are_for_conv<'tcx>(
+ cx: &LateContext<'tcx>,
+ final_tys: &[Ty<'tcx>],
+ expr: &Expr<'_>,
+ elements: &[Expr<'_>],
+ locals: &[&Expr<'_>],
+ kind: ToType,
+) -> bool {
+ let Some(locals) = locals.iter().map(|e| path_to_local(e)).collect::<Option<Vec<_>>>() else {
+ return false;
+ };
+ let Some(local_parents) = locals
+ .iter()
+ .map(|&l| cx.tcx.hir().find_parent(l))
+ .collect::<Option<Vec<_>>>()
+ else {
+ return false;
+ };
+
+ local_parents
+ .iter()
+ .map(|node| match node {
+ Node::Pat(pat) => kind.eq(&pat.kind).then_some(pat.hir_id),
+ Node::Local(l) => Some(l.hir_id),
+ _ => None,
+ })
+ .all_equal()
+ // Fix #11124, very convenient utils function! ❤️
+ && locals
+ .iter()
+ .all(|&l| for_each_local_use_after_expr(cx, l, expr.hir_id, |_| ControlFlow::Break::<()>(())).is_continue())
+ && local_parents.first().is_some_and(|node| {
+ let Some(ty) = match node {
+ Node::Pat(pat) => Some(pat.hir_id),
+ Node::Local(l) => Some(l.hir_id),
+ _ => None,
}
-
- false
- }) || should_lint(
- cx,
- elements,
- |(i, expr)| {
- if let ExprKind::Index(path, index) = expr.kind
- && let ExprKind::Lit(lit) = index.kind
- && let LitKind::Int(val, _) = lit.node
- && val as usize == i
- {
- return Some((i, path));
+ .map(|hir_id| cx.typeck_results().node_type(hir_id)) else {
+ return false;
};
-
- None
- },
- |(first_id, local)| {
- if let Node::Pat(pat) = local
- && let parent = parent_pat(cx, pat)
- && parent.hir_id == first_id
- {
- return matches!(
- cx.typeck_results().pat_ty(parent).peel_refs().kind(),
- ty::Array(_, len) if len.eval_target_usize(cx.tcx, cx.param_env) as usize == elements.len()
- );
+ match (kind, ty.kind()) {
+ // Ensure the final type and the original type have the same length, and that there
+ // is no implicit `&mut`<=>`&` anywhere (#11100). Bit ugly, I know, but it works.
+ (ToType::Array, ty::Tuple(tys)) => {
+ tys.len() == elements.len() && tys.iter().chain(final_tys.iter().copied()).all_equal()
+ },
+ (ToType::Tuple, ty::Array(ty, len)) => {
+ len.eval_target_usize(cx.tcx, cx.param_env) as usize == elements.len()
+ && final_tys.iter().chain(once(ty)).all_equal()
+ },
+ _ => false,
}
-
- false
- },
- ) {
- emit_lint(cx, expr, ToType::Tuple);
- }
-}
-
-/// Walks up the `Pat` until it's reached the final containing `Pat`.
-fn parent_pat<'tcx>(cx: &LateContext<'tcx>, start: &'tcx Pat<'tcx>) -> &'tcx Pat<'tcx> {
- let mut end = start;
- for (_, node) in cx.tcx.hir().parent_iter(start.hir_id) {
- if let Node::Pat(pat) = node {
- end = pat;
- } else {
- break;
- }
- }
- end
+ })
}
#[derive(Clone, Copy)]
@@ -175,61 +203,11 @@ enum ToType {
Tuple,
}
-impl ToType {
- fn msg(self) -> &'static str {
- match self {
- ToType::Array => "it looks like you're trying to convert a tuple to an array",
- ToType::Tuple => "it looks like you're trying to convert an array to a tuple",
- }
- }
-
- fn help(self) -> &'static str {
+impl PartialEq<PatKind<'_>> for ToType {
+ fn eq(&self, other: &PatKind<'_>) -> bool {
match self {
- ToType::Array => "use `.into()` instead, or `<[T; N]>::from` if type annotations are needed",
- ToType::Tuple => "use `.into()` instead, or `<(T0, T1, ..., Tn)>::from` if type annotations are needed",
+ ToType::Array => matches!(other, PatKind::Tuple(_, _)),
+ ToType::Tuple => matches!(other, PatKind::Slice(_, _, _)),
}
}
}
-
-fn emit_lint<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, to_type: ToType) -> bool {
- if !is_from_proc_macro(cx, expr) {
- span_lint_and_help(
- cx,
- TUPLE_ARRAY_CONVERSIONS,
- expr.span,
- to_type.msg(),
- None,
- to_type.help(),
- );
-
- return true;
- }
-
- false
-}
-
-fn should_lint<'tcx>(
- cx: &LateContext<'tcx>,
- elements: &'tcx [Expr<'tcx>],
- map: impl FnMut((usize, &'tcx Expr<'tcx>)) -> Option<(usize, &Expr<'_>)>,
- predicate: impl FnMut((HirId, &Node<'tcx>)) -> bool,
-) -> bool {
- if let Some(elements) = elements
- .iter()
- .enumerate()
- .map(map)
- .collect::<Option<Vec<_>>>()
- && let Some(locals) = elements
- .iter()
- .map(|(_, element)| path_to_local(element).and_then(|local| cx.tcx.hir().find(local)))
- .collect::<Option<Vec<_>>>()
- && let [first, rest @ ..] = &*locals
- && let Node::Pat(first_pat) = first
- && let parent = parent_pat(cx, first_pat).hir_id
- && rest.iter().chain(once(first)).map(|i| (parent, i)).all(predicate)
- {
- return true;
- }
-
- false
-}
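The restructured `tuple_array_conversions` check above still nudges manual element-by-element rebuilds toward `.into()`, now also requiring that the element types match and that the source binding is not used afterwards. A small, hypothetical before/after, assuming Rust 1.71 or later where the `From`/`Into` impls between tuples and arrays of up to 12 equally typed elements are available:

```rust
fn main() {
    let point = (1i32, 2i32);

    // The shape the lint targets: rebuilding the tuple as an array by hand.
    let by_hand = [point.0, point.1];

    // Suggested spelling: convert with `Into` instead.
    let converted: [i32; 2] = (3i32, 4i32).into();

    assert_eq!(by_hand, [1, 2]);
    assert_eq!(converted, [3, 4]);
}
```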
diff --git a/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs b/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs
index acdf54710..306ca5724 100644
--- a/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs
+++ b/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs
@@ -2,8 +2,9 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet;
use if_chain::if_chain;
use rustc_errors::Applicability;
-use rustc_hir::{self as hir, GenericArg, GenericBounds, GenericParamKind};
-use rustc_hir::{HirId, Lifetime, MutTy, Mutability, Node, QPath, TyKind};
+use rustc_hir::{
+ self as hir, GenericArg, GenericBounds, GenericParamKind, HirId, Lifetime, MutTy, Mutability, Node, QPath, TyKind,
+};
use rustc_lint::LateContext;
use rustc_span::sym;
diff --git a/src/tools/clippy/clippy_lints/src/types/box_collection.rs b/src/tools/clippy/clippy_lints/src/types/box_collection.rs
index 43665a922..4a5a94f26 100644
--- a/src/tools/clippy/clippy_lints/src/types/box_collection.rs
+++ b/src/tools/clippy/clippy_lints/src/types/box_collection.rs
@@ -1,6 +1,7 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::{path_def_id, qpath_generic_tys};
-use rustc_hir::{self as hir, def_id::DefId, QPath};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{self as hir, QPath};
use rustc_lint::LateContext;
use rustc_span::{sym, Symbol};
diff --git a/src/tools/clippy/clippy_lints/src/types/linked_list.rs b/src/tools/clippy/clippy_lints/src/types/linked_list.rs
index 5fb708741..fba804bbe 100644
--- a/src/tools/clippy/clippy_lints/src/types/linked_list.rs
+++ b/src/tools/clippy/clippy_lints/src/types/linked_list.rs
@@ -1,5 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_help;
-use rustc_hir::{self as hir, def_id::DefId};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{self as hir};
use rustc_lint::LateContext;
use rustc_span::symbol::sym;
diff --git a/src/tools/clippy/clippy_lints/src/types/mod.rs b/src/tools/clippy/clippy_lints/src/types/mod.rs
index 3c873a590..79f9d45d5 100644
--- a/src/tools/clippy/clippy_lints/src/types/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/types/mod.rs
@@ -349,7 +349,7 @@ impl<'tcx> LateLintPass<'tcx> for Types {
let is_exported = cx.effective_visibilities.is_exported(item.owner_id.def_id);
match item.kind {
- ItemKind::Static(ty, _, _) | ItemKind::Const(ty, _) => self.check_ty(
+ ItemKind::Static(ty, _, _) | ItemKind::Const(ty, _, _) => self.check_ty(
cx,
ty,
CheckTyContext {
diff --git a/src/tools/clippy/clippy_lints/src/types/option_option.rs b/src/tools/clippy/clippy_lints/src/types/option_option.rs
index 8767e3c30..60622903a 100644
--- a/src/tools/clippy/clippy_lints/src/types/option_option.rs
+++ b/src/tools/clippy/clippy_lints/src/types/option_option.rs
@@ -1,7 +1,8 @@
use clippy_utils::diagnostics::span_lint;
use clippy_utils::{path_def_id, qpath_generic_tys};
use if_chain::if_chain;
-use rustc_hir::{self as hir, def_id::DefId, QPath};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{self as hir, QPath};
use rustc_lint::LateContext;
use rustc_span::symbol::sym;
diff --git a/src/tools/clippy/clippy_lints/src/types/rc_buffer.rs b/src/tools/clippy/clippy_lints/src/types/rc_buffer.rs
index 855137b14..f6c2d8d5a 100644
--- a/src/tools/clippy/clippy_lints/src/types/rc_buffer.rs
+++ b/src/tools/clippy/clippy_lints/src/types/rc_buffer.rs
@@ -2,7 +2,8 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::{path_def_id, qpath_generic_tys};
use rustc_errors::Applicability;
-use rustc_hir::{self as hir, def_id::DefId, QPath, TyKind};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{self as hir, QPath, TyKind};
use rustc_lint::LateContext;
use rustc_span::symbol::sym;
@@ -22,7 +23,9 @@ pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_
app,
);
} else {
- let Some(ty) = qpath_generic_tys(qpath).next() else { return false };
+ let Some(ty) = qpath_generic_tys(qpath).next() else {
+ return false;
+ };
let Some(id) = path_def_id(cx, ty) else { return false };
if !cx.tcx.is_diagnostic_item(sym::Vec, id) {
return false;
diff --git a/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs b/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs
index a75972cf3..a616c3e4e 100644
--- a/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs
+++ b/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs
@@ -1,7 +1,8 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::{path_def_id, qpath_generic_tys};
use if_chain::if_chain;
-use rustc_hir::{self as hir, def_id::DefId, QPath};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{self as hir, QPath};
use rustc_lint::LateContext;
use rustc_span::symbol::sym;
diff --git a/src/tools/clippy/clippy_lints/src/types/redundant_allocation.rs b/src/tools/clippy/clippy_lints/src/types/redundant_allocation.rs
index f7adc9d35..5a986254f 100644
--- a/src/tools/clippy/clippy_lints/src/types/redundant_allocation.rs
+++ b/src/tools/clippy/clippy_lints/src/types/redundant_allocation.rs
@@ -2,7 +2,8 @@ use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::{snippet, snippet_with_applicability};
use clippy_utils::{path_def_id, qpath_generic_tys};
use rustc_errors::Applicability;
-use rustc_hir::{self as hir, def_id::DefId, QPath, TyKind};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{self as hir, QPath, TyKind};
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_lint::LateContext;
use rustc_middle::ty::TypeVisitableExt;
@@ -39,7 +40,9 @@ pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_
return true;
}
- let Some(ty) = qpath_generic_tys(qpath).next() else { return false };
+ let Some(ty) = qpath_generic_tys(qpath).next() else {
+ return false;
+ };
let Some(id) = path_def_id(cx, ty) else { return false };
let (inner_sym, ty) = match cx.tcx.get_diagnostic_name(id) {
Some(sym::Arc) => ("Arc", ty),
@@ -49,7 +52,7 @@ pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_
};
let TyKind::Path(inner_qpath) = &ty.kind else {
- return false
+ return false;
};
let inner_span = match qpath_generic_tys(inner_qpath).next() {
Some(hir_ty) => {
diff --git a/src/tools/clippy/clippy_lints/src/types/vec_box.rs b/src/tools/clippy/clippy_lints/src/types/vec_box.rs
index d3062f3d2..decc183ad 100644
--- a/src/tools/clippy/clippy_lints/src/types/vec_box.rs
+++ b/src/tools/clippy/clippy_lints/src/types/vec_box.rs
@@ -3,7 +3,8 @@ use clippy_utils::last_path_segment;
use clippy_utils::source::snippet;
use if_chain::if_chain;
use rustc_errors::Applicability;
-use rustc_hir::{self as hir, def_id::DefId, GenericArg, QPath, TyKind};
+use rustc_hir::def_id::DefId;
+use rustc_hir::{self as hir, GenericArg, QPath, TyKind};
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_lint::LateContext;
use rustc_middle::ty::layout::LayoutOf;
diff --git a/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs b/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs
index a9deee967..f2ef60201 100644
--- a/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs
+++ b/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs
@@ -158,11 +158,12 @@ impl<'tcx> LateLintPass<'tcx> for UndocumentedUnsafeBlocks {
}
fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &hir::Stmt<'tcx>) {
- let (
- hir::StmtKind::Local(&hir::Local { init: Some(expr), .. })
- | hir::StmtKind::Expr(expr)
- | hir::StmtKind::Semi(expr)
- ) = stmt.kind else { return };
+ let (hir::StmtKind::Local(&hir::Local { init: Some(expr), .. })
+ | hir::StmtKind::Expr(expr)
+ | hir::StmtKind::Semi(expr)) = stmt.kind
+ else {
+ return;
+ };
if !is_lint_allowed(cx, UNNECESSARY_SAFETY_COMMENT, stmt.hir_id)
&& !in_external_macro(cx.tcx.sess, stmt.span)
&& let HasSafetyComment::Yes(pos) = stmt_has_safety_comment(cx, stmt.span, stmt.hir_id)
diff --git a/src/tools/clippy/clippy_lints/src/uninit_vec.rs b/src/tools/clippy/clippy_lints/src/uninit_vec.rs
index 1ab0162a8..6756df8e7 100644
--- a/src/tools/clippy/clippy_lints/src/uninit_vec.rs
+++ b/src/tools/clippy/clippy_lints/src/uninit_vec.rs
@@ -88,7 +88,7 @@ fn handle_uninit_vec_pair<'tcx>(
if let Some((set_len_self, call_span)) = extract_set_len_self(cx, maybe_set_len);
if vec.location.eq_expr(cx, set_len_self);
if let ty::Ref(_, vec_ty, _) = cx.typeck_results().expr_ty_adjusted(set_len_self).kind();
- if let ty::Adt(_, substs) = vec_ty.kind();
+ if let ty::Adt(_, args) = vec_ty.kind();
// `#[allow(...)]` attribute can be set on enclosing unsafe block of `set_len()`
if !is_lint_allowed(cx, UNINIT_VEC, maybe_set_len.hir_id);
then {
@@ -96,7 +96,7 @@ fn handle_uninit_vec_pair<'tcx>(
// with_capacity / reserve -> set_len
// Check T of Vec<T>
- if !is_uninit_value_valid_for_ty(cx, substs.type_at(0)) {
+ if !is_uninit_value_valid_for_ty(cx, args.type_at(0)) {
// FIXME: #7698, false positive of the internal lints
#[expect(clippy::collapsible_span_lint_calls)]
span_lint_and_then(
diff --git a/src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs b/src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs
index 99a1d1976..dd829ded0 100644
--- a/src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs
+++ b/src/tools/clippy/clippy_lints/src/unit_return_expecting_ord.rs
@@ -65,7 +65,7 @@ fn get_projection_pred<'tcx>(
generics.predicates.iter().find_map(|(proj_pred, _)| {
if let ClauseKind::Projection(pred) = proj_pred.kind().skip_binder() {
let projection_pred = cx.tcx.erase_late_bound_regions(proj_pred.kind().rebind(pred));
- if projection_pred.projection_ty.substs == trait_pred.trait_ref.substs {
+ if projection_pred.projection_ty.args == trait_pred.trait_ref.args {
return Some(projection_pred);
}
}
@@ -76,7 +76,7 @@ fn get_projection_pred<'tcx>(
fn get_args_to_check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Vec<(usize, String)> {
let mut args_to_check = Vec::new();
if let Some(def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) {
- let fn_sig = cx.tcx.fn_sig(def_id).subst_identity();
+ let fn_sig = cx.tcx.fn_sig(def_id).instantiate_identity();
let generics = cx.tcx.predicates_of(def_id);
let fn_mut_preds = get_trait_predicates_for_trait_id(cx, generics, cx.tcx.lang_items().fn_mut_trait());
let ord_preds = get_trait_predicates_for_trait_id(cx, generics, cx.tcx.get_diagnostic_item(sym::Ord));
@@ -120,8 +120,8 @@ fn get_args_to_check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Ve
fn check_arg<'tcx>(cx: &LateContext<'tcx>, arg: &'tcx Expr<'tcx>) -> Option<(Span, Option<Span>)> {
if_chain! {
if let ExprKind::Closure(&Closure { body, fn_decl_span, .. }) = arg.kind;
- if let ty::Closure(_def_id, substs) = &cx.typeck_results().node_type(arg.hir_id).kind();
- let ret_ty = substs.as_closure().sig().output();
+ if let ty::Closure(_def_id, args) = &cx.typeck_results().node_type(arg.hir_id).kind();
+ let ret_ty = args.as_closure().sig().output();
let ty = cx.tcx.erase_late_bound_regions(ret_ty);
if ty.is_unit();
then {
diff --git a/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs b/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs
index cc7c2b039..704d7abd7 100644
--- a/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs
+++ b/src/tools/clippy/clippy_lints/src/unit_types/let_unit_value.rs
@@ -161,7 +161,7 @@ fn needs_inferred_result_ty(
},
_ => return false,
};
- let sig = cx.tcx.fn_sig(id).subst_identity().skip_binder();
+ let sig = cx.tcx.fn_sig(id).instantiate_identity().skip_binder();
if let ty::Param(output_ty) = *sig.output().kind() {
let args: Vec<&Expr<'_>> = if let Some(receiver) = receiver {
std::iter::once(receiver).chain(args.iter()).collect()
diff --git a/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs b/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs
index dd120599c..462b1aa81 100644
--- a/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs
+++ b/src/tools/clippy/clippy_lints/src/unit_types/unit_arg.rs
@@ -42,7 +42,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
if cx.typeck_results().expr_ty(arg).is_unit() && !utils::is_unit_literal(arg) {
!matches!(
&arg.kind,
- ExprKind::Match(.., MatchSource::TryDesugar) | ExprKind::Path(..)
+ ExprKind::Match(.., MatchSource::TryDesugar(_)) | ExprKind::Path(..)
)
} else {
false
diff --git a/src/tools/clippy/clippy_lints/src/unit_types/unit_cmp.rs b/src/tools/clippy/clippy_lints/src/unit_types/unit_cmp.rs
index 226495dcb..d4342ec51 100644
--- a/src/tools/clippy/clippy_lints/src/unit_types/unit_cmp.rs
+++ b/src/tools/clippy/clippy_lints/src/unit_types/unit_cmp.rs
@@ -14,7 +14,9 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>) {
"assert_ne" | "debug_assert_ne" => "fail",
_ => return,
};
- let Some ((left, _, _)) = find_assert_eq_args(cx, expr, macro_call.expn) else { return };
+ let Some((left, _, _)) = find_assert_eq_args(cx, expr, macro_call.expn) else {
+ return;
+ };
if !cx.typeck_results().expr_ty(left).is_unit() {
return;
}
diff --git a/src/tools/clippy/clippy_lints/src/unnamed_address.rs b/src/tools/clippy/clippy_lints/src/unnamed_address.rs
index 0f5cdb6aa..dea8a1e35 100644
--- a/src/tools/clippy/clippy_lints/src/unnamed_address.rs
+++ b/src/tools/clippy/clippy_lints/src/unnamed_address.rs
@@ -97,7 +97,7 @@ impl LateLintPass<'_> for UnnamedAddress {
if let ExprKind::Path(ref func_qpath) = func.kind;
if let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id();
if match_def_path(cx, def_id, &paths::PTR_EQ);
- let ty_param = cx.typeck_results().node_substs(func.hir_id).type_at(0);
+ let ty_param = cx.typeck_results().node_args(func.hir_id).type_at(0);
if ty_param.is_trait();
then {
span_lint_and_help(
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs b/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs
index e7449639f..ed2ef5063 100644
--- a/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs
@@ -1,6 +1,8 @@
-use clippy_utils::{diagnostics::span_lint_and_then, ty::approx_ty_size};
+use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::ty::approx_ty_size;
use rustc_errors::Applicability;
-use rustc_hir::{def_id::LocalDefId, FnDecl, FnRetTy, ImplItemKind, Item, ItemKind, Node, TraitItem, TraitItemKind};
+use rustc_hir::def_id::LocalDefId;
+use rustc_hir::{FnDecl, FnRetTy, ImplItemKind, Item, ItemKind, Node, TraitItem, TraitItemKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::Symbol;
@@ -63,7 +65,9 @@ impl UnnecessaryBoxReturns {
return;
}
- let FnRetTy::Return(return_ty_hir) = &decl.output else { return };
+ let FnRetTy::Return(return_ty_hir) = &decl.output else {
+ return;
+ };
let return_ty = cx
.tcx
@@ -103,25 +107,33 @@ impl UnnecessaryBoxReturns {
impl LateLintPass<'_> for UnnecessaryBoxReturns {
fn check_trait_item(&mut self, cx: &LateContext<'_>, item: &TraitItem<'_>) {
- let TraitItemKind::Fn(signature, _) = &item.kind else { return };
+ let TraitItemKind::Fn(signature, _) = &item.kind else {
+ return;
+ };
self.check_fn_item(cx, signature.decl, item.owner_id.def_id, item.ident.name);
}
fn check_impl_item(&mut self, cx: &LateContext<'_>, item: &rustc_hir::ImplItem<'_>) {
// Ignore implementations of traits, because the lint should be on the
// trait, not on the implementation of it.
- let Node::Item(parent) = cx.tcx.hir().get_parent(item.hir_id()) else { return };
+ let Node::Item(parent) = cx.tcx.hir().get_parent(item.hir_id()) else {
+ return;
+ };
let ItemKind::Impl(parent) = parent.kind else { return };
if parent.of_trait.is_some() {
return;
}
- let ImplItemKind::Fn(signature, ..) = &item.kind else { return };
+ let ImplItemKind::Fn(signature, ..) = &item.kind else {
+ return;
+ };
self.check_fn_item(cx, signature.decl, item.owner_id.def_id, item.ident.name);
}
fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) {
- let ItemKind::Fn(signature, ..) = &item.kind else { return };
+ let ItemKind::Fn(signature, ..) = &item.kind else {
+ return;
+ };
self.check_fn_item(cx, signature.decl, item.owner_id.def_id, item.ident.name);
}
}
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs b/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs
index 6e802794f..57a4a429e 100644
--- a/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs
@@ -1,4 +1,5 @@
-use clippy_utils::{diagnostics::span_lint_and_sugg, ty::is_type_lang_item};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::ty::is_type_lang_item;
use clippy_utils::{match_def_path, paths};
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs b/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs
index 084b03198..f4111186c 100644
--- a/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs
@@ -1,4 +1,7 @@
-use clippy_utils::{diagnostics::span_lint_and_sugg, get_parent_expr, path_to_local, source::snippet, ty::is_copy};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet;
+use clippy_utils::ty::is_copy;
+use clippy_utils::{get_parent_expr, path_to_local};
use rustc_hir::{BindingAnnotation, Expr, ExprKind, Node, PatKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs b/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs
index 5073eb02b..f34f8d0e3 100644
--- a/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs
+++ b/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs
@@ -1,6 +1,7 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet;
-use clippy_utils::{contains_return, is_res_lang_ctor, path_res, return_ty, visitors::find_all_ret_expressions};
+use clippy_utils::visitors::find_all_ret_expressions;
+use clippy_utils::{contains_return, is_res_lang_ctor, path_res, return_ty};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::FnKind;
diff --git a/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs b/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs
index a57bf7ee8..9cf595772 100644
--- a/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs
+++ b/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs
@@ -6,7 +6,8 @@ use clippy_utils::msrvs::{self, Msrv};
use clippy_utils::over;
use rustc_ast::mut_visit::*;
use rustc_ast::ptr::P;
-use rustc_ast::{self as ast, Mutability, Pat, PatKind, PatKind::*, DUMMY_NODE_ID};
+use rustc_ast::PatKind::*;
+use rustc_ast::{self as ast, Mutability, Pat, PatKind, DUMMY_NODE_ID};
use rustc_ast_pretty::pprust;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
@@ -162,9 +163,7 @@ fn unnest_or_patterns(pat: &mut P<Pat>) -> bool {
noop_visit_pat(p, self);
// Don't have an or-pattern? Just quit early on.
- let Or(alternatives) = &mut p.kind else {
- return
- };
+ let Or(alternatives) = &mut p.kind else { return };
// Collapse or-patterns directly nested in or-patterns.
let mut idx = 0;
diff --git a/src/tools/clippy/clippy_lints/src/unused_async.rs b/src/tools/clippy/clippy_lints/src/unused_async.rs
index 5e42cf7e4..bc7c3897a 100644
--- a/src/tools/clippy/clippy_lints/src/unused_async.rs
+++ b/src/tools/clippy/clippy_lints/src/unused_async.rs
@@ -1,11 +1,12 @@
-use clippy_utils::diagnostics::span_lint_and_then;
+use clippy_utils::diagnostics::span_lint_hir_and_then;
use clippy_utils::is_def_id_trait_method;
+use rustc_hir::def::DefKind;
use rustc_hir::intravisit::{walk_body, walk_expr, walk_fn, FnKind, Visitor};
-use rustc_hir::{Body, Expr, ExprKind, FnDecl, YieldSource};
+use rustc_hir::{Body, Expr, ExprKind, FnDecl, Node, YieldSource};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter;
-use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::def_id::LocalDefId;
+use rustc_session::{declare_tool_lint, impl_lint_pass};
+use rustc_span::def_id::{LocalDefId, LocalDefIdSet};
use rustc_span::Span;
declare_clippy_lint! {
@@ -38,7 +39,24 @@ declare_clippy_lint! {
"finds async functions with no await statements"
}
-declare_lint_pass!(UnusedAsync => [UNUSED_ASYNC]);
+#[derive(Default)]
+pub struct UnusedAsync {
+ /// Keeps track of async functions used as values (i.e. path expressions to async functions that
+ /// are not immediately called)
+ async_fns_as_value: LocalDefIdSet,
+ /// Functions with unused `async`, linted post-crate after we've found all uses of local async
+ /// functions
+ unused_async_fns: Vec<UnusedAsyncFn>,
+}
+
+#[derive(Copy, Clone)]
+struct UnusedAsyncFn {
+ def_id: LocalDefId,
+ fn_span: Span,
+ await_in_async_block: Option<Span>,
+}
+
+impl_lint_pass!(UnusedAsync => [UNUSED_ASYNC]);
struct AsyncFnVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
@@ -101,24 +119,70 @@ impl<'tcx> LateLintPass<'tcx> for UnusedAsync {
};
walk_fn(&mut visitor, fn_kind, fn_decl, body.id(), def_id);
if !visitor.found_await {
- span_lint_and_then(
- cx,
- UNUSED_ASYNC,
- span,
- "unused `async` for function with no await statements",
- |diag| {
- diag.help("consider removing the `async` from this function");
-
- if let Some(span) = visitor.await_in_async_block {
- diag.span_note(
- span,
- "`await` used in an async block, which does not require \
- the enclosing function to be `async`",
- );
- }
- },
- );
+ // Don't lint just yet, but store the necessary information for later.
+ // The actual linting happens in `check_crate_post`, once we've found all
+ // uses of local async functions that do require asyncness to pass typeck
+ self.unused_async_fns.push(UnusedAsyncFn {
+ await_in_async_block: visitor.await_in_async_block,
+ fn_span: span,
+ def_id,
+ });
}
}
}
+
+ fn check_path(&mut self, cx: &LateContext<'tcx>, path: &rustc_hir::Path<'tcx>, hir_id: rustc_hir::HirId) {
+ fn is_node_func_call(node: Node<'_>, expected_receiver: Span) -> bool {
+ matches!(
+ node,
+ Node::Expr(Expr {
+ kind: ExprKind::Call(Expr { span, .. }, _) | ExprKind::MethodCall(_, Expr { span, .. }, ..),
+ ..
+ }) if *span == expected_receiver
+ )
+ }
+
+ // Find paths to local async functions that aren't immediately called.
+ // E.g. `async fn f() {}; let x = f;`
+ // Depending on how `x` is used, f's asyncness might be required despite not having any `await`
+ // statements, so don't lint at all if there are any such paths.
+ if let Some(def_id) = path.res.opt_def_id()
+ && let Some(local_def_id) = def_id.as_local()
+ && let Some(DefKind::Fn) = cx.tcx.opt_def_kind(def_id)
+ && cx.tcx.asyncness(def_id).is_async()
+ && !is_node_func_call(cx.tcx.hir().get_parent(hir_id), path.span)
+ {
+ self.async_fns_as_value.insert(local_def_id);
+ }
+ }
+
+ // After collecting all unused `async` and problematic paths to such functions,
+ // lint those unused ones that didn't have any path expressions to them.
+ fn check_crate_post(&mut self, cx: &LateContext<'tcx>) {
+ let iter = self
+ .unused_async_fns
+ .iter()
+ .filter(|UnusedAsyncFn { def_id, .. }| (!self.async_fns_as_value.contains(def_id)));
+
+ for fun in iter {
+ span_lint_hir_and_then(
+ cx,
+ UNUSED_ASYNC,
+ cx.tcx.local_def_id_to_hir_id(fun.def_id),
+ fun.fn_span,
+ "unused `async` for function with no await statements",
+ |diag| {
+ diag.help("consider removing the `async` from this function");
+
+ if let Some(span) = fun.await_in_async_block {
+ diag.span_note(
+ span,
+ "`await` used in an async block, which does not require \
+ the enclosing function to be `async`",
+ );
+ }
+ },
+ );
+ }
+ }
}
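
Illustrative sketch (not part of the patch): the reworked `unused_async` pass defers linting until `check_crate_post` so that an async fn referenced as a value, rather than called on the spot, is not flagged even though it contains no `await`. A minimal user-level example of the pattern the pass now tolerates; the helper name `spawn_like` is hypothetical:

async fn f() {} // no `await` inside, but its asyncness is still observable

fn spawn_like<F: std::future::Future>(_fut: F) {}

fn main() {
    // `f` appears here as a path expression that is not immediately called,
    // so `check_path` records it and `check_crate_post` skips the lint for `f`.
    let g = f;
    spawn_like(g());
}
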
diff --git a/src/tools/clippy/clippy_lints/src/unused_io_amount.rs b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs
index 0e526c216..0fcb62017 100644
--- a/src/tools/clippy/clippy_lints/src/unused_io_amount.rs
+++ b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs
@@ -48,7 +48,7 @@ declare_lint_pass!(UnusedIoAmount => [UNUSED_IO_AMOUNT]);
impl<'tcx> LateLintPass<'tcx> for UnusedIoAmount {
fn check_stmt(&mut self, cx: &LateContext<'_>, s: &hir::Stmt<'_>) {
let (hir::StmtKind::Semi(expr) | hir::StmtKind::Expr(expr)) = s.kind else {
- return
+ return;
};
match expr.kind {
diff --git a/src/tools/clippy/clippy_lints/src/unused_unit.rs b/src/tools/clippy/clippy_lints/src/unused_unit.rs
index cad8da18c..95e74718d 100644
--- a/src/tools/clippy/clippy_lints/src/unused_unit.rs
+++ b/src/tools/clippy/clippy_lints/src/unused_unit.rs
@@ -1,7 +1,8 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::{position_before_rarrow, snippet_opt};
use if_chain::if_chain;
-use rustc_ast::{ast, visit::FnKind, ClosureBinder};
+use rustc_ast::visit::FnKind;
+use rustc_ast::{ast, ClosureBinder};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
diff --git a/src/tools/clippy/clippy_lints/src/unwrap.rs b/src/tools/clippy/clippy_lints/src/unwrap.rs
index 377d3fb6f..c99b0290c 100644
--- a/src/tools/clippy/clippy_lints/src/unwrap.rs
+++ b/src/tools/clippy/clippy_lints/src/unwrap.rs
@@ -1,7 +1,7 @@
use clippy_utils::diagnostics::span_lint_hir_and_then;
-use clippy_utils::higher;
use clippy_utils::ty::is_type_diagnostic_item;
-use clippy_utils::{path_to_local, usage::is_potentially_mutated};
+use clippy_utils::usage::is_potentially_mutated;
+use clippy_utils::{higher, path_to_local};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, walk_fn, FnKind, Visitor};
diff --git a/src/tools/clippy/clippy_lints/src/use_self.rs b/src/tools/clippy/clippy_lints/src/use_self.rs
index 5a0298745..50231d930 100644
--- a/src/tools/clippy/clippy_lints/src/use_self.rs
+++ b/src/tools/clippy/clippy_lints/src/use_self.rs
@@ -5,13 +5,12 @@ use clippy_utils::ty::same_type_and_consts;
use if_chain::if_chain;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
+use rustc_hir::def::{CtorOf, DefKind, Res};
+use rustc_hir::def_id::LocalDefId;
+use rustc_hir::intravisit::{walk_inf, walk_ty, Visitor};
use rustc_hir::{
- self as hir,
- def::{CtorOf, DefKind, Res},
- def_id::LocalDefId,
- intravisit::{walk_inf, walk_ty, Visitor},
- Expr, ExprKind, FnRetTy, FnSig, GenericArg, GenericArgsParentheses, GenericParam, GenericParamKind, HirId, Impl,
- ImplItemKind, Item, ItemKind, Pat, PatKind, Path, QPath, Ty, TyKind,
+ self as hir, Expr, ExprKind, FnRetTy, FnSig, GenericArg, GenericArgsParentheses, GenericParam, GenericParamKind,
+ HirId, Impl, ImplItemKind, Item, ItemKind, Pat, PatKind, Path, QPath, Ty, TyKind,
};
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_lint::{LateContext, LateLintPass};
@@ -145,7 +144,7 @@ impl<'tcx> LateLintPass<'tcx> for UseSelf {
then {
// `self_ty` is the semantic self type of `impl <trait> for <type>`. This cannot be
// `Self`.
- let self_ty = impl_trait_ref.subst_identity().self_ty();
+ let self_ty = impl_trait_ref.instantiate_identity().self_ty();
// `trait_method_sig` is the signature of the function, how it is declared in the
// trait, not in the impl of the trait.
@@ -154,7 +153,7 @@ impl<'tcx> LateLintPass<'tcx> for UseSelf {
.associated_item(impl_item.owner_id)
.trait_item_def_id
.expect("impl method matches a trait method");
- let trait_method_sig = cx.tcx.fn_sig(trait_method).subst_identity();
+ let trait_method_sig = cx.tcx.fn_sig(trait_method).instantiate_identity();
let trait_method_sig = cx.tcx.erase_late_bound_regions(trait_method_sig);
// `impl_inputs_outputs` is an iterator over the types (`hir::Ty`) declared in the
@@ -226,7 +225,7 @@ impl<'tcx> LateLintPass<'tcx> for UseSelf {
} else {
hir_ty_to_ty(cx.tcx, hir_ty)
};
- if same_type_and_consts(ty, cx.tcx.type_of(impl_id).subst_identity());
+ if same_type_and_consts(ty, cx.tcx.type_of(impl_id).instantiate_identity());
then {
span_lint(cx, hir_ty.span);
}
@@ -238,7 +237,7 @@ impl<'tcx> LateLintPass<'tcx> for UseSelf {
if !expr.span.from_expansion();
if self.msrv.meets(msrvs::TYPE_ALIAS_ENUM_VARIANTS);
if let Some(&StackItem::Check { impl_id, .. }) = self.stack.last();
- if cx.typeck_results().expr_ty(expr) == cx.tcx.type_of(impl_id).subst_identity();
+ if cx.typeck_results().expr_ty(expr) == cx.tcx.type_of(impl_id).instantiate_identity();
then {} else { return; }
}
match expr.kind {
@@ -262,7 +261,7 @@ impl<'tcx> LateLintPass<'tcx> for UseSelf {
if let PatKind::Path(QPath::Resolved(_, path))
| PatKind::TupleStruct(QPath::Resolved(_, path), _, _)
| PatKind::Struct(QPath::Resolved(_, path), _, _) = pat.kind;
- if cx.typeck_results().pat_ty(pat) == cx.tcx.type_of(impl_id).subst_identity();
+ if cx.typeck_results().pat_ty(pat) == cx.tcx.type_of(impl_id).instantiate_identity();
then {
check_path(cx, path);
}
diff --git a/src/tools/clippy/clippy_lints/src/useless_conversion.rs b/src/tools/clippy/clippy_lints/src/useless_conversion.rs
index 22de383ea..5ac4f0aa4 100644
--- a/src/tools/clippy/clippy_lints/src/useless_conversion.rs
+++ b/src/tools/clippy/clippy_lints/src/useless_conversion.rs
@@ -1,9 +1,8 @@
use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg, span_lint_and_then};
-use clippy_utils::is_ty_alias;
use clippy_utils::source::{snippet, snippet_with_applicability, snippet_with_context};
use clippy_utils::sugg::Sugg;
use clippy_utils::ty::{is_copy, is_type_diagnostic_item, same_type_and_consts};
-use clippy_utils::{get_parent_expr, is_trait_method, match_def_path, path_to_local, paths};
+use clippy_utils::{get_parent_expr, is_trait_method, is_ty_alias, match_def_path, path_to_local, paths};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::def::DefKind;
@@ -117,9 +116,9 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion {
}
match e.kind {
- ExprKind::Match(_, arms, MatchSource::TryDesugar) => {
+ ExprKind::Match(_, arms, MatchSource::TryDesugar(_)) => {
let (ExprKind::Ret(Some(e)) | ExprKind::Break(_, Some(e))) = arms[0].body.kind else {
- return
+ return;
};
if let ExprKind::Call(_, [arg, ..]) = e.kind {
self.try_desugar_arm.push(arg.hir_id);
@@ -236,8 +235,8 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion {
let a = cx.typeck_results().expr_ty(e);
let b = cx.typeck_results().expr_ty(recv);
if is_type_diagnostic_item(cx, a, sym::Result);
- if let ty::Adt(_, substs) = a.kind();
- if let Some(a_type) = substs.types().next();
+ if let ty::Adt(_, args) = a.kind();
+ if let Some(a_type) = args.types().next();
if same_type_and_consts(a_type, b);
then {
@@ -264,8 +263,8 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion {
if_chain! {
if match_def_path(cx, def_id, &paths::TRY_FROM);
if is_type_diagnostic_item(cx, a, sym::Result);
- if let ty::Adt(_, substs) = a.kind();
- if let Some(a_type) = substs.types().next();
+ if let ty::Adt(_, args) = a.kind();
+ if let Some(a_type) = args.types().next();
if same_type_and_consts(a_type, b);
then {
diff --git a/src/tools/clippy/clippy_lints/src/utils/author.rs b/src/tools/clippy/clippy_lints/src/utils/author.rs
index 6b51974d7..f02c33cc6 100644
--- a/src/tools/clippy/clippy_lints/src/utils/author.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/author.rs
@@ -526,7 +526,7 @@ impl<'a, 'tcx> PrintVisitor<'a, 'tcx> {
self.ident(field_name);
self.expr(object);
},
- ExprKind::Index(object, index) => {
+ ExprKind::Index(object, index, _) => {
bind!(self, object, index);
kind!("Index({object}, {index})");
self.expr(object);
diff --git a/src/tools/clippy/clippy_lints/src/utils/conf.rs b/src/tools/clippy/clippy_lints/src/utils/conf.rs
index f1d05c752..58ae0656d 100644
--- a/src/tools/clippy/clippy_lints/src/utils/conf.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/conf.rs
@@ -319,7 +319,7 @@ define_Conf! {
/// Lint: DISALLOWED_NAMES.
///
/// The list of disallowed names to lint about. NB: `bar` is not here since it has legitimate uses. The value
- /// `".."` can be used as part of the list to indicate, that the configured values should be appended to the
+ /// `".."` can be used as part of the list to indicate that the configured values should be appended to the
/// default configuration of Clippy. By default, any configuration will replace the default value.
(disallowed_names: Vec<String> = super::DEFAULT_DISALLOWED_NAMES.iter().map(ToString::to_string).collect()),
/// Lint: SEMICOLON_INSIDE_BLOCK.
@@ -551,6 +551,16 @@ define_Conf! {
///
/// Whether to allow `r#""#` when `r""` can be used
(allow_one_hash_in_raw_strings: bool = false),
+ /// Lint: ABSOLUTE_PATHS.
+ ///
+ /// The maximum number of segments a path can have before being linted, anything above this will
+ /// be linted.
+ (absolute_paths_max_segments: u64 = 2),
+ /// Lint: ABSOLUTE_PATHS.
+ ///
+ /// Which crates to allow absolute paths from
+ (absolute_paths_allowed_crates: rustc_data_structures::fx::FxHashSet<String> =
+ rustc_data_structures::fx::FxHashSet::default()),
}
/// Search for the configuration file.
diff --git a/src/tools/clippy/clippy_lints/src/utils/format_args_collector.rs b/src/tools/clippy/clippy_lints/src/utils/format_args_collector.rs
index 09fcb82c3..6d3493523 100644
--- a/src/tools/clippy/clippy_lints/src/utils/format_args_collector.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/format_args_collector.rs
@@ -71,7 +71,9 @@ fn has_span_from_proc_macro(cx: &EarlyContext<'_>, args: &FormatArgs) -> bool {
for between_span in between_spans {
let mut seen_comma = false;
- let Some(snippet) = snippet_opt(cx, between_span) else { return true };
+ let Some(snippet) = snippet_opt(cx, between_span) else {
+ return true;
+ };
for token in tokenize(&snippet) {
match token.kind {
TokenKind::LineComment { .. } | TokenKind::BlockComment { .. } | TokenKind::Whitespace => {},
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/if_chain_style.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/if_chain_style.rs
index 883a5c08e..fe2f12fe8 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/if_chain_style.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/if_chain_style.rs
@@ -46,7 +46,9 @@ impl<'tcx> LateLintPass<'tcx> for IfChainStyle {
} else {
return;
};
- let ExprKind::Block(then_block, _) = then.kind else { return };
+ let ExprKind::Block(then_block, _) = then.kind else {
+ return;
+ };
let if_chain_span = is_expn_of(expr.span, "if_chain");
if !els {
check_nested_if_chains(cx, expr, then_block, if_chain_span);
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs
index dced9fcf9..da8654d93 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs
@@ -78,7 +78,7 @@ impl<'tcx> LateLintPass<'tcx> for InterningDefinedSymbol {
for item in cx.tcx.module_children(def_id) {
if_chain! {
if let Res::Def(DefKind::Const, item_def_id) = item.res;
- let ty = cx.tcx.type_of(item_def_id).subst_identity();
+ let ty = cx.tcx.type_of(item_def_id).instantiate_identity();
if match_type(cx, ty, &paths::SYMBOL);
if let Ok(ConstValue::Scalar(value)) = cx.tcx.const_eval_poly(item_def_id);
if let Ok(value) = value.to_u32();
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs
index 9afe02c1e..4ed985f54 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs
@@ -7,7 +7,8 @@ use rustc_hir::def::DefKind;
use rustc_hir::Item;
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_lint::{LateContext, LateLintPass};
-use rustc_middle::ty::{self, fast_reject::SimplifiedType, FloatTy};
+use rustc_middle::ty::fast_reject::SimplifiedType;
+use rustc_middle::ty::{self, FloatTy};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::symbol::Symbol;
@@ -33,7 +34,7 @@ impl<'tcx> LateLintPass<'tcx> for InvalidPaths {
let mod_name = &cx.tcx.item_name(local_def_id.to_def_id());
if_chain! {
if mod_name.as_str() == "paths";
- if let hir::ItemKind::Const(ty, body_id) = item.kind;
+ if let hir::ItemKind::Const(ty, _, body_id) = item.kind;
let ty = hir_ty_to_ty(cx.tcx, ty);
if let ty::Array(el_ty, _) = &ty.kind();
if let ty::Ref(_, el_ty, _) = &el_ty.kind();
@@ -73,10 +74,10 @@ pub fn check_path(cx: &LateContext<'_>, path: &[&str]) -> bool {
let lang_items = cx.tcx.lang_items();
// This list isn't complete, but good enough for our current list of paths.
let incoherent_impls = [
- SimplifiedType::FloatSimplifiedType(FloatTy::F32),
- SimplifiedType::FloatSimplifiedType(FloatTy::F64),
- SimplifiedType::SliceSimplifiedType,
- SimplifiedType::StrSimplifiedType,
+ SimplifiedType::Float(FloatTy::F32),
+ SimplifiedType::Float(FloatTy::F64),
+ SimplifiedType::Slice,
+ SimplifiedType::Str,
]
.iter()
.flat_map(|&ty| cx.tcx.incoherent_impls(ty).iter().copied());
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs
index f71820765..87380f14f 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs
@@ -3,10 +3,8 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
use clippy_utils::macros::root_macro_call_first_node;
use clippy_utils::{is_lint_allowed, match_def_path, paths};
use if_chain::if_chain;
-use rustc_ast as ast;
use rustc_ast::ast::LitKind;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::hir_id::CRATE_HIR_ID;
use rustc_hir::intravisit::Visitor;
@@ -18,6 +16,7 @@ use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::Spanned;
use rustc_span::symbol::Symbol;
use rustc_span::{sym, Span};
+use {rustc_ast as ast, rustc_hir as hir};
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs
index 107a62806..f49c3fadb 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs
@@ -8,11 +8,8 @@
//! a simple mistake)
use crate::renamed_lints::RENAMED_LINTS;
-use crate::utils::{
- collect_configs,
- internal_lints::lint_without_lint_pass::{extract_clippy_version_value, is_lint_ref_type},
- ClippyConfiguration,
-};
+use crate::utils::internal_lints::lint_without_lint_pass::{extract_clippy_version_value, is_lint_ref_type};
+use crate::utils::{collect_configs, ClippyConfiguration};
use clippy_utils::diagnostics::span_lint;
use clippy_utils::ty::{match_type, walk_ptrs_ty_depth};
@@ -21,22 +18,22 @@ use if_chain::if_chain;
use itertools::Itertools;
use rustc_ast as ast;
use rustc_data_structures::fx::FxHashMap;
-use rustc_hir::{
- self as hir, def::DefKind, intravisit, intravisit::Visitor, Closure, ExprKind, Item, ItemKind, Mutability, QPath,
-};
+use rustc_hir::def::DefKind;
+use rustc_hir::intravisit::Visitor;
+use rustc_hir::{self as hir, intravisit, Closure, ExprKind, Item, ItemKind, Mutability, QPath};
use rustc_lint::{CheckLintNameResult, LateContext, LateLintPass, LintContext, LintId};
use rustc_middle::hir::nested_filter;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::symbol::Ident;
use rustc_span::{sym, Loc, Span, Symbol};
-use serde::{ser::SerializeStruct, Serialize, Serializer};
+use serde::ser::SerializeStruct;
+use serde::{Serialize, Serializer};
use std::collections::{BTreeSet, BinaryHeap};
use std::fmt;
use std::fmt::Write as _;
use std::fs::{self, OpenOptions};
use std::io::prelude::*;
-use std::path::Path;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
use std::process::Command;
/// This is the json output file of the lint collector.
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/msrv_attr_impl.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/msrv_attr_impl.rs
index 09f0f0d0a..bf835f89c 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/msrv_attr_impl.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/msrv_attr_impl.rs
@@ -7,7 +7,7 @@ use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_lint::{LateContext, LateLintPass, LintContext};
-use rustc_middle::ty::{self, subst::GenericArgKind};
+use rustc_middle::ty::{self, GenericArgKind};
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
@@ -39,7 +39,7 @@ impl LateLintPass<'_> for MsrvAttrImpl {
if self_ty_def.all_fields().any(|f| {
cx.tcx
.type_of(f.did)
- .subst_identity()
+ .instantiate_identity()
.walk()
.filter(|t| matches!(t.unpack(), GenericArgKind::Type(_)))
.any(|t| match_type(cx, t.expect_ty(), &paths::MSRV))
diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs
index 008423766..f66f33fee 100644
--- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs
@@ -229,11 +229,11 @@ fn path_to_matched_type(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> Option<Ve
Res::Def(DefKind::Static(_), def_id) => read_mir_alloc_def_path(
cx,
cx.tcx.eval_static_initializer(def_id).ok()?.inner(),
- cx.tcx.type_of(def_id).subst_identity(),
+ cx.tcx.type_of(def_id).instantiate_identity(),
),
Res::Def(DefKind::Const, def_id) => match cx.tcx.const_eval_poly(def_id).ok()? {
ConstValue::ByRef { alloc, offset } if offset.bytes() == 0 => {
- read_mir_alloc_def_path(cx, alloc.inner(), cx.tcx.type_of(def_id).subst_identity())
+ read_mir_alloc_def_path(cx, alloc.inner(), cx.tcx.type_of(def_id).instantiate_identity())
},
_ => None,
},
diff --git a/src/tools/clippy/clippy_lints/src/utils/mod.rs b/src/tools/clippy/clippy_lints/src/utils/mod.rs
index fb0825693..4fef8c071 100644
--- a/src/tools/clippy/clippy_lints/src/utils/mod.rs
+++ b/src/tools/clippy/clippy_lints/src/utils/mod.rs
@@ -18,7 +18,7 @@ const BOOK_CONFIGS_PATH: &str = "https://doc.rust-lang.org/clippy/lint_configura
// ==================================================================
// Configuration
// ==================================================================
-#[derive(Debug, Clone, Default)] //~ ERROR no such field
+#[derive(Debug, Clone, Default)]
pub struct ClippyConfiguration {
pub name: String,
#[allow(dead_code)]
diff --git a/src/tools/clippy/clippy_lints/src/vec.rs b/src/tools/clippy/clippy_lints/src/vec.rs
index 2a594e750..fc17e7c6d 100644
--- a/src/tools/clippy/clippy_lints/src/vec.rs
+++ b/src/tools/clippy/clippy_lints/src/vec.rs
@@ -154,6 +154,10 @@ impl UselessVec {
span: Span,
suggest_slice: SuggestedType,
) {
+ if span.from_expansion() {
+ return;
+ }
+
let mut applicability = Applicability::MachineApplicable;
let snippet = match *vec_args {
@@ -181,7 +185,7 @@ impl UselessVec {
if args.len() as u64 * size_of(cx, last) > self.too_large_for_stack {
return;
}
- let span = args[0].span.to(last.span);
+ let span = args[0].span.source_callsite().to(last.span.source_callsite());
let args = snippet_with_applicability(cx, span, "..", &mut applicability);
match suggest_slice {
@@ -230,8 +234,8 @@ fn size_of(cx: &LateContext<'_>, expr: &Expr<'_>) -> u64 {
/// Returns the item type of the vector (i.e., the `T` in `Vec<T>`).
fn vec_type(ty: Ty<'_>) -> Ty<'_> {
- if let ty::Adt(_, substs) = ty.kind() {
- substs.type_at(0)
+ if let ty::Adt(_, args) = ty.kind() {
+ args.type_at(0)
} else {
panic!("The type of `vec!` is a not a struct?");
}
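
Illustrative sketch (not part of the patch) of the case the new `span.from_expansion()` early return addresses in `useless_vec`: when the `vec![..]` span originates from a macro expansion, a slice suggestion would point into the expansion, so the check bails out and element spans are taken at their `source_callsite()`. The macro name `make_values` is hypothetical:

macro_rules! make_values {
    () => {
        vec![1, 2, 3]
    };
}

fn sum(xs: &[i32]) -> i32 {
    xs.iter().sum()
}

fn main() {
    // The `vec![..]` comes from the expansion of `make_values!`, so the lint
    // no longer emits a machine-applicable suggestion for this call site.
    let total = sum(&make_values!());
    println!("{total}");
}
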
diff --git a/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs b/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs
index bd5be0c9d..3fa51216c 100644
--- a/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs
+++ b/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs
@@ -74,7 +74,7 @@ impl VecPushSearcher {
let mut needs_mut = false;
let res = for_each_local_use_after_expr(cx, self.local_id, self.last_push_expr, |e| {
let Some(parent) = get_parent_expr(cx, e) else {
- return ControlFlow::Continue(())
+ return ControlFlow::Continue(());
};
let adjusted_ty = cx.typeck_results().expr_ty_adjusted(e);
let adjusted_mut = adjusted_ty.ref_mutability().unwrap_or(Mutability::Not);
@@ -88,7 +88,7 @@ impl VecPushSearcher {
let mut last_place = parent;
while let Some(parent) = get_parent_expr(cx, last_place) {
if matches!(parent.kind, ExprKind::Unary(UnOp::Deref, _) | ExprKind::Field(..))
- || matches!(parent.kind, ExprKind::Index(e, _) if e.hir_id == last_place.hir_id)
+ || matches!(parent.kind, ExprKind::Index(e, _, _) if e.hir_id == last_place.hir_id)
{
last_place = parent;
} else {
diff --git a/src/tools/clippy/clippy_lints/src/visibility.rs b/src/tools/clippy/clippy_lints/src/visibility.rs
index 43248bccc..496376520 100644
--- a/src/tools/clippy/clippy_lints/src/visibility.rs
+++ b/src/tools/clippy/clippy_lints/src/visibility.rs
@@ -1,10 +1,12 @@
-use clippy_utils::{diagnostics::span_lint_and_sugg, source::snippet_opt};
+use clippy_utils::diagnostics::span_lint_and_sugg;
+use clippy_utils::source::snippet_opt;
use rustc_ast::ast::{Item, VisibilityKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_lint_pass, declare_tool_lint};
-use rustc_span::{symbol::kw, Span};
+use rustc_span::symbol::kw;
+use rustc_span::Span;
declare_clippy_lint! {
/// ### What it does
diff --git a/src/tools/clippy/clippy_lints/src/wildcard_imports.rs b/src/tools/clippy/clippy_lints/src/wildcard_imports.rs
index 2a3d86988..d09d02a7d 100644
--- a/src/tools/clippy/clippy_lints/src/wildcard_imports.rs
+++ b/src/tools/clippy/clippy_lints/src/wildcard_imports.rs
@@ -3,10 +3,8 @@ use clippy_utils::is_test_module_or_function;
use clippy_utils::source::{snippet, snippet_with_applicability};
use if_chain::if_chain;
use rustc_errors::Applicability;
-use rustc_hir::{
- def::{DefKind, Res},
- Item, ItemKind, PathSegment, UseKind,
-};
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{Item, ItemKind, PathSegment, UseKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::ty;
use rustc_session::{declare_tool_lint, impl_lint_pass};
diff --git a/src/tools/clippy/clippy_lints/src/write.rs b/src/tools/clippy/clippy_lints/src/write.rs
index f194dc5d4..a9957b18a 100644
--- a/src/tools/clippy/clippy_lints/src/write.rs
+++ b/src/tools/clippy/clippy_lints/src/write.rs
@@ -272,9 +272,15 @@ impl<'tcx> LateLintPass<'tcx> for Write {
}
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return };
- let Some(diag_name) = cx.tcx.get_diagnostic_name(macro_call.def_id) else { return };
- let Some(name) = diag_name.as_str().strip_suffix("_macro") else { return };
+ let Some(macro_call) = root_macro_call_first_node(cx, expr) else {
+ return;
+ };
+ let Some(diag_name) = cx.tcx.get_diagnostic_name(macro_call.def_id) else {
+ return;
+ };
+ let Some(name) = diag_name.as_str().strip_suffix("_macro") else {
+ return;
+ };
let is_build_script = cx
.sess()
@@ -343,7 +349,9 @@ fn is_debug_impl(cx: &LateContext<'_>, item: &Item<'_>) -> bool {
}
fn check_newline(cx: &LateContext<'_>, format_args: &FormatArgs, macro_call: &MacroCall, name: &str) {
- let Some(FormatArgsPiece::Literal(last)) = format_args.template.last() else { return };
+ let Some(FormatArgsPiece::Literal(last)) = format_args.template.last() else {
+ return;
+ };
let count_vertical_whitespace = || {
format_args
@@ -379,7 +387,9 @@ fn check_newline(cx: &LateContext<'_>, format_args: &FormatArgs, macro_call: &Ma
&format!("using `{name}!()` with a format string that ends in a single newline"),
|diag| {
let name_span = cx.sess().source_map().span_until_char(macro_call.span, '!');
- let Some(format_snippet) = snippet_opt(cx, format_string_span) else { return };
+ let Some(format_snippet) = snippet_opt(cx, format_string_span) else {
+ return;
+ };
if format_args.template.len() == 1 && last.as_str() == "\n" {
// print!("\n"), write!(f, "\n")
@@ -522,7 +532,7 @@ fn check_literal(cx: &LateContext<'_>, format_args: &FormatArgs, name: &str) {
{
let replacement = replacement.replace('{', "{{").replace('}', "}}");
diag.multipart_suggestion(
- "try this",
+ "try",
vec![(*placeholder_span, replacement), (removal_span, String::new())],
Applicability::MachineApplicable,
);
diff --git a/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs b/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs
index 93e4b023c..002304f88 100644
--- a/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs
+++ b/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs
@@ -51,8 +51,8 @@ impl LateLintPass<'_> for ZeroSizedMapValues {
if !in_trait_impl(cx, hir_ty.hir_id);
let ty = ty_from_hir_ty(cx, hir_ty);
if is_type_diagnostic_item(cx, ty, sym::HashMap) || is_type_diagnostic_item(cx, ty, sym::BTreeMap);
- if let Adt(_, substs) = ty.kind();
- let ty = substs.type_at(1);
+ if let Adt(_, args) = ty.kind();
+ let ty = args.type_at(1);
// Fixes https://github.com/rust-lang/rust-clippy/issues/7447 because of
// https://github.com/rust-lang/rust/blob/master/compiler/rustc_middle/src/ty/sty.rs#L968
if !ty.has_escaping_bound_vars();
diff --git a/src/tools/clippy/clippy_test_deps/Cargo.toml b/src/tools/clippy/clippy_test_deps/Cargo.toml
deleted file mode 100644
index 362c08e0d..000000000
--- a/src/tools/clippy/clippy_test_deps/Cargo.toml
+++ /dev/null
@@ -1,23 +0,0 @@
-[package]
-name = "clippy_test_deps"
-version = "0.1.0"
-edition = "2021"
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-clippy_utils = { path = "../clippy_utils" }
-derive-new = "0.5"
-if_chain = "1.0"
-itertools = "0.10.1"
-quote = "1.0"
-serde = { version = "1.0.125", features = ["derive"] }
-syn = { version = "2.0", features = ["full"] }
-futures = "0.3"
-parking_lot = "0.12"
-tokio = { version = "1", features = ["io-util"] }
-regex = "1.5"
-clippy_lints = { path = "../clippy_lints" }
-
-[features]
-internal = ["clippy_lints/internal"]
diff --git a/src/tools/clippy/clippy_test_deps/src/lib.rs b/src/tools/clippy/clippy_test_deps/src/lib.rs
deleted file mode 100644
index 7d12d9af8..000000000
--- a/src/tools/clippy/clippy_test_deps/src/lib.rs
+++ /dev/null
@@ -1,14 +0,0 @@
-pub fn add(left: usize, right: usize) -> usize {
- left + right
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn it_works() {
- let result = add(2, 2);
- assert_eq!(result, 4);
- }
-}
diff --git a/src/tools/clippy/clippy_utils/Cargo.toml b/src/tools/clippy/clippy_utils/Cargo.toml
index cfe686eb9..3926b954e 100644
--- a/src/tools/clippy/clippy_utils/Cargo.toml
+++ b/src/tools/clippy/clippy_utils/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "clippy_utils"
-version = "0.1.72"
+version = "0.1.73"
edition = "2021"
publish = false
diff --git a/src/tools/clippy/clippy_utils/src/ast_utils.rs b/src/tools/clippy/clippy_utils/src/ast_utils.rs
index 8cc01f1ef..140cfa219 100644
--- a/src/tools/clippy/clippy_utils/src/ast_utils.rs
+++ b/src/tools/clippy/clippy_utils/src/ast_utils.rs
@@ -178,7 +178,9 @@ pub fn eq_expr(l: &Expr, r: &Expr) -> bool {
(Yield(l), Yield(r)) | (Ret(l), Ret(r)) => eq_expr_opt(l, r),
(Break(ll, le), Break(rl, re)) => eq_label(ll, rl) && eq_expr_opt(le, re),
(Continue(ll), Continue(rl)) => eq_label(ll, rl),
- (Assign(l1, l2, _), Assign(r1, r2, _)) | (Index(l1, l2), Index(r1, r2)) => eq_expr(l1, r1) && eq_expr(l2, r2),
+ (Assign(l1, l2, _), Assign(r1, r2, _)) | (Index(l1, l2, _), Index(r1, r2, _)) => {
+ eq_expr(l1, r1) && eq_expr(l2, r2)
+ },
(AssignOp(lo, lp, lv), AssignOp(ro, rp, rv)) => lo.node == ro.node && eq_expr(lp, rp) && eq_expr(lv, rv),
(Field(lp, lf), Field(rp, rf)) => eq_id(*lf, *rf) && eq_expr(lp, rp),
(Match(ls, la), Match(rs, ra)) => eq_expr(ls, rs) && over(la, ra, eq_arm),
@@ -301,15 +303,17 @@ pub fn eq_item_kind(l: &ItemKind, r: &ItemKind) -> bool {
(
Const(box ast::ConstItem {
defaultness: ld,
+ generics: lg,
ty: lt,
expr: le,
}),
Const(box ast::ConstItem {
defaultness: rd,
+ generics: rg,
ty: rt,
expr: re,
}),
- ) => eq_defaultness(*ld, *rd) && eq_ty(lt, rt) && eq_expr_opt(le, re),
+ ) => eq_defaultness(*ld, *rd) && eq_generics(lg, rg) && eq_ty(lt, rt) && eq_expr_opt(le, re),
(
Fn(box ast::Fn {
defaultness: ld,
@@ -476,15 +480,17 @@ pub fn eq_assoc_item_kind(l: &AssocItemKind, r: &AssocItemKind) -> bool {
(
Const(box ast::ConstItem {
defaultness: ld,
+ generics: lg,
ty: lt,
expr: le,
}),
Const(box ast::ConstItem {
defaultness: rd,
+ generics: rg,
ty: rt,
expr: re,
}),
- ) => eq_defaultness(*ld, *rd) && eq_ty(lt, rt) && eq_expr_opt(le, re),
+ ) => eq_defaultness(*ld, *rd) && eq_generics(lg, rg) && eq_ty(lt, rt) && eq_expr_opt(le, re),
(
Fn(box ast::Fn {
defaultness: ld,
diff --git a/src/tools/clippy/clippy_utils/src/attrs.rs b/src/tools/clippy/clippy_utils/src/attrs.rs
index 49cb9718e..51771f78d 100644
--- a/src/tools/clippy/clippy_utils/src/attrs.rs
+++ b/src/tools/clippy/clippy_utils/src/attrs.rs
@@ -1,5 +1,4 @@
-use rustc_ast::ast;
-use rustc_ast::attr;
+use rustc_ast::{ast, attr};
use rustc_errors::Applicability;
use rustc_session::Session;
use rustc_span::sym;
@@ -143,13 +142,13 @@ pub fn get_unique_attr<'a>(
unique_attr
}
-/// Return true if the attributes contain any of `proc_macro`,
+/// Returns true if the attributes contain any of `proc_macro`,
/// `proc_macro_derive` or `proc_macro_attribute`, false otherwise
pub fn is_proc_macro(attrs: &[ast::Attribute]) -> bool {
attrs.iter().any(rustc_ast::Attribute::is_proc_macro_attr)
}
-/// Return true if the attributes contain `#[doc(hidden)]`
+/// Returns true if the attributes contain `#[doc(hidden)]`
pub fn is_doc_hidden(attrs: &[ast::Attribute]) -> bool {
attrs
.iter()
diff --git a/src/tools/clippy/clippy_utils/src/check_proc_macro.rs b/src/tools/clippy/clippy_utils/src/check_proc_macro.rs
index c6d0b654f..6be8b8bb9 100644
--- a/src/tools/clippy/clippy_utils/src/check_proc_macro.rs
+++ b/src/tools/clippy/clippy_utils/src/check_proc_macro.rs
@@ -12,20 +12,20 @@
//! code was written, and check if the span contains that text. Note this will only work correctly
//! if the span is not from a `macro_rules` based macro.
-use rustc_ast::{
- ast::{AttrKind, Attribute, IntTy, LitIntType, LitKind, StrStyle, UintTy},
- token::CommentKind,
- AttrStyle,
-};
+use rustc_ast::ast::{AttrKind, Attribute, IntTy, LitIntType, LitKind, StrStyle, UintTy};
+use rustc_ast::token::CommentKind;
+use rustc_ast::AttrStyle;
+use rustc_hir::intravisit::FnKind;
use rustc_hir::{
- intravisit::FnKind, Block, BlockCheckMode, Body, Closure, Destination, Expr, ExprKind, FieldDef, FnHeader, HirId,
- Impl, ImplItem, ImplItemKind, IsAuto, Item, ItemKind, LoopSource, MatchSource, MutTy, Node, QPath, TraitItem,
- TraitItemKind, Ty, TyKind, UnOp, UnsafeSource, Unsafety, Variant, VariantData, YieldSource,
+ Block, BlockCheckMode, Body, Closure, Destination, Expr, ExprKind, FieldDef, FnHeader, HirId, Impl, ImplItem,
+ ImplItemKind, IsAuto, Item, ItemKind, LoopSource, MatchSource, MutTy, Node, QPath, TraitItem, TraitItemKind, Ty,
+ TyKind, UnOp, UnsafeSource, Unsafety, Variant, VariantData, YieldSource,
};
use rustc_lint::{LateContext, LintContext};
use rustc_middle::ty::TyCtxt;
use rustc_session::Session;
-use rustc_span::{symbol::Ident, Span, Symbol};
+use rustc_span::symbol::Ident;
+use rustc_span::{Span, Symbol};
use rustc_target::spec::abi::Abi;
/// The search pattern to look for. Used by `span_matches_pat`
@@ -149,7 +149,7 @@ fn expr_search_pat(tcx: TyCtxt<'_>, e: &Expr<'_>) -> (Pat, Pat) {
(Pat::Str("for"), Pat::Str("}"))
},
ExprKind::Match(_, _, MatchSource::Normal) => (Pat::Str("match"), Pat::Str("}")),
- ExprKind::Match(e, _, MatchSource::TryDesugar) => (expr_search_pat(tcx, e).0, Pat::Str("?")),
+ ExprKind::Match(e, _, MatchSource::TryDesugar(_)) => (expr_search_pat(tcx, e).0, Pat::Str("?")),
ExprKind::Match(e, _, MatchSource::AwaitDesugar) | ExprKind::Yield(e, YieldSource::Await { .. }) => {
(expr_search_pat(tcx, e).0, Pat::Str("await"))
},
@@ -163,7 +163,7 @@ fn expr_search_pat(tcx: TyCtxt<'_>, e: &Expr<'_>) -> (Pat, Pat) {
) => (Pat::Str("unsafe"), Pat::Str("}")),
ExprKind::Block(_, None) => (Pat::Str("{"), Pat::Str("}")),
ExprKind::Field(e, name) => (expr_search_pat(tcx, e).0, Pat::Sym(name.name)),
- ExprKind::Index(e, _) => (expr_search_pat(tcx, e).0, Pat::Str("]")),
+ ExprKind::Index(e, _, _) => (expr_search_pat(tcx, e).0, Pat::Str("]")),
ExprKind::Path(ref path) => qpath_search_pat(path),
ExprKind::AddrOf(_, _, e) => (Pat::Str("&"), expr_search_pat(tcx, e).1),
ExprKind::Break(Destination { label: None, .. }, None) => (Pat::Str("break"), Pat::Str("break")),
@@ -339,7 +339,7 @@ fn ty_search_pat(ty: &Ty<'_>) -> (Pat, Pat) {
TyKind::Tup(..) => (Pat::Str("("), Pat::Str(")")),
TyKind::OpaqueDef(..) => (Pat::Str("impl"), Pat::Str("")),
TyKind::Path(qpath) => qpath_search_pat(&qpath),
- // NOTE: This is missing `TraitObject`. It always return true then.
+ // NOTE: This is missing `TraitObject`. It will always return true then.
_ => (Pat::Str(""), Pat::Str("")),
}
}
diff --git a/src/tools/clippy/clippy_utils/src/comparisons.rs b/src/tools/clippy/clippy_utils/src/comparisons.rs
index 7a18d5e81..5e6bf2278 100644
--- a/src/tools/clippy/clippy_utils/src/comparisons.rs
+++ b/src/tools/clippy/clippy_utils/src/comparisons.rs
@@ -1,11 +1,11 @@
-//! Utility functions about comparison operators.
+//! Utility functions for comparison operators.
#![deny(clippy::missing_docs_in_private_items)]
use rustc_hir::{BinOpKind, Expr};
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
-/// Represent a normalized comparison operator.
+/// Represents a normalized comparison operator.
pub enum Rel {
/// `<`
Lt,
diff --git a/src/tools/clippy/clippy_utils/src/consts.rs b/src/tools/clippy/clippy_utils/src/consts.rs
index d1cfdc496..6d57af325 100644
--- a/src/tools/clippy/clippy_utils/src/consts.rs
+++ b/src/tools/clippy/clippy_utils/src/consts.rs
@@ -9,11 +9,9 @@ use rustc_hir::def::{DefKind, Res};
use rustc_hir::{BinOp, BinOpKind, Block, ConstBlock, Expr, ExprKind, HirId, Item, ItemKind, Node, QPath, UnOp};
use rustc_lexer::tokenize;
use rustc_lint::LateContext;
-use rustc_middle::mir;
use rustc_middle::mir::interpret::Scalar;
-use rustc_middle::ty::{self, EarlyBinder, FloatTy, ScalarInt, Ty, TyCtxt};
-use rustc_middle::ty::{List, SubstsRef};
-use rustc_middle::{bug, span_bug};
+use rustc_middle::ty::{self, EarlyBinder, FloatTy, GenericArgsRef, List, ScalarInt, Ty, TyCtxt};
+use rustc_middle::{bug, mir, span_bug};
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::SyntaxContext;
use std::cmp::Ordering::{self, Equal};
@@ -155,7 +153,7 @@ impl<'tcx> Constant<'tcx> {
},
(Self::Vec(l), Self::Vec(r)) => {
let (ty::Array(cmp_type, _) | ty::Slice(cmp_type)) = *cmp_type.kind() else {
- return None
+ return None;
};
iter::zip(l, r)
.map(|(li, ri)| Self::partial_cmp(tcx, cmp_type, li, ri))
@@ -267,7 +265,7 @@ pub fn constant_with_source<'tcx>(
res.map(|x| (x, ctxt.source))
}
-/// Attempts to evaluate an expression only if it's value is not dependent on other items.
+/// Attempts to evaluate an expression only if its value is not dependent on other items.
pub fn constant_simple<'tcx>(
lcx: &LateContext<'tcx>,
typeck_results: &ty::TypeckResults<'tcx>,
@@ -327,17 +325,17 @@ pub struct ConstEvalLateContext<'a, 'tcx> {
typeck_results: &'a ty::TypeckResults<'tcx>,
param_env: ty::ParamEnv<'tcx>,
source: ConstantSource,
- substs: SubstsRef<'tcx>,
+ args: GenericArgsRef<'tcx>,
}
impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
- fn new(lcx: &'a LateContext<'tcx>, typeck_results: &'a ty::TypeckResults<'tcx>) -> Self {
+ pub fn new(lcx: &'a LateContext<'tcx>, typeck_results: &'a ty::TypeckResults<'tcx>) -> Self {
Self {
lcx,
typeck_results,
param_env: lcx.param_env,
source: ConstantSource::Local,
- substs: List::empty(),
+ args: List::empty(),
}
}
@@ -396,7 +394,7 @@ impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
}
}
},
- ExprKind::Index(arr, index) => self.index(arr, index),
+ ExprKind::Index(arr, index, _) => self.index(arr, index),
ExprKind::AddrOf(_, _, inner) => self.expr(inner).map(|r| Constant::Ref(Box::new(r))),
ExprKind::Field(local_expr, ref field) => {
let result = self.expr(local_expr);
@@ -463,7 +461,7 @@ impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
// Check if this constant is based on `cfg!(..)`,
// which is NOT constant for our purposes.
if let Some(node) = self.lcx.tcx.hir().get_if_local(def_id)
- && let Node::Item(Item { kind: ItemKind::Const(_, body_id), .. }) = node
+ && let Node::Item(Item { kind: ItemKind::Const(.., body_id), .. }) = node
&& let Node::Expr(Expr { kind: ExprKind::Lit(_), span, .. }) = self.lcx
.tcx
.hir()
@@ -473,16 +471,16 @@ impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
return None;
}
- let substs = self.typeck_results.node_substs(id);
- let substs = if self.substs.is_empty() {
- substs
+ let args = self.typeck_results.node_args(id);
+ let args = if self.args.is_empty() {
+ args
} else {
- EarlyBinder::bind(substs).subst(self.lcx.tcx, self.substs)
+ EarlyBinder::bind(args).instantiate(self.lcx.tcx, self.args)
};
let result = self
.lcx
.tcx
- .const_eval_resolve(self.param_env, mir::UnevaluatedConst::new(def_id, substs), None)
+ .const_eval_resolve(self.param_env, mir::UnevaluatedConst::new(def_id, args), None)
.ok()
.map(|val| rustc_middle::mir::ConstantKind::from_value(val, ty))?;
let result = miri_to_const(self.lcx, result)?;
@@ -726,7 +724,7 @@ fn field_of_struct<'tcx>(
field: &Ident,
) -> Option<mir::ConstantKind<'tcx>> {
if let mir::ConstantKind::Val(result, ty) = result
- && let Some(dc) = lcx.tcx.try_destructure_mir_constant_for_diagnostics((result, ty))
+ && let Some(dc) = rustc_const_eval::const_eval::try_destructure_mir_constant_for_diagnostics(lcx.tcx, result, ty)
&& let Some(dc_variant) = dc.variant
&& let Some(variant) = adt_def.variants().get(dc_variant)
&& let Some(field_idx) = variant.fields.iter().position(|el| el.name == field.name)
diff --git a/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs
index 4a845ca63..0bcefba75 100644
--- a/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs
+++ b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs
@@ -1,7 +1,7 @@
//! Utilities for evaluating whether eagerly evaluated expressions can be made lazy and vice versa.
//!
//! Things to consider:
-//! - has the expression side-effects?
+//! - does the expression have side-effects?
//! - is the expression computationally expensive?
//!
//! See lints:
@@ -12,14 +12,14 @@
use crate::ty::{all_predicates_of, is_copy};
use crate::visitors::is_const_evaluatable;
use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::{walk_expr, Visitor};
-use rustc_hir::{def_id::DefId, Block, Expr, ExprKind, QPath, UnOp};
+use rustc_hir::{Block, Expr, ExprKind, QPath, UnOp};
use rustc_lint::LateContext;
use rustc_middle::ty;
use rustc_middle::ty::adjustment::Adjust;
use rustc_span::{sym, Symbol};
-use std::cmp;
-use std::ops;
+use std::{cmp, ops};
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum EagernessSuggestion {
@@ -51,7 +51,7 @@ fn fn_eagerness(cx: &LateContext<'_>, fn_id: DefId, name: Symbol, have_one_arg:
let name = name.as_str();
let ty = match cx.tcx.impl_of_method(fn_id) {
- Some(id) => cx.tcx.type_of(id).subst_identity(),
+ Some(id) => cx.tcx.type_of(id).instantiate_identity(),
None => return Lazy,
};
@@ -68,19 +68,24 @@ fn fn_eagerness(cx: &LateContext<'_>, fn_id: DefId, name: Symbol, have_one_arg:
// Types where the only fields are generic types (or references to) with no trait bounds other
// than marker traits.
// Due to the limited operations on these types functions should be fairly cheap.
- if def
- .variants()
- .iter()
- .flat_map(|v| v.fields.iter())
- .any(|x| matches!(cx.tcx.type_of(x.did).subst_identity().peel_refs().kind(), ty::Param(_)))
- && all_predicates_of(cx.tcx, fn_id).all(|(pred, _)| match pred.kind().skip_binder() {
- ty::ClauseKind::Trait(pred) => cx.tcx.trait_def(pred.trait_ref.def_id).is_marker,
- _ => true,
- })
- && subs.types().all(|x| matches!(x.peel_refs().kind(), ty::Param(_)))
+ if def.variants().iter().flat_map(|v| v.fields.iter()).any(|x| {
+ matches!(
+ cx.tcx.type_of(x.did).instantiate_identity().peel_refs().kind(),
+ ty::Param(_)
+ )
+ }) && all_predicates_of(cx.tcx, fn_id).all(|(pred, _)| match pred.kind().skip_binder() {
+ ty::ClauseKind::Trait(pred) => cx.tcx.trait_def(pred.trait_ref.def_id).is_marker,
+ _ => true,
+ }) && subs.types().all(|x| matches!(x.peel_refs().kind(), ty::Param(_)))
{
// Limit the function to either `(self) -> bool` or `(&self) -> bool`
- match &**cx.tcx.fn_sig(fn_id).subst_identity().skip_binder().inputs_and_output {
+ match &**cx
+ .tcx
+ .fn_sig(fn_id)
+ .instantiate_identity()
+ .skip_binder()
+ .inputs_and_output
+ {
[arg, res] if !arg.is_mutable_ptr() && arg.peel_refs() == ty && res.is_bool() => NoChange,
_ => Lazy,
}
@@ -180,7 +185,7 @@ fn expr_eagerness<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> EagernessS
.type_dependent_def_id(e.hir_id)
.map_or(Lazy, |id| fn_eagerness(self.cx, id, name.ident.name, true));
},
- ExprKind::Index(_, e) => {
+ ExprKind::Index(_, e, _) => {
let ty = self.cx.typeck_results().expr_ty_adjusted(e);
if is_copy(self.cx, ty) && !ty.is_ref() {
self.eagerness |= NoChange;
diff --git a/src/tools/clippy/clippy_utils/src/higher.rs b/src/tools/clippy/clippy_utils/src/higher.rs
index a61e4c380..802adbd4d 100644
--- a/src/tools/clippy/clippy_utils/src/higher.rs
+++ b/src/tools/clippy/clippy_utils/src/higher.rs
@@ -13,7 +13,7 @@ use rustc_lint::LateContext;
use rustc_span::{sym, symbol, Span};
/// The essential nodes of a desugared for loop as well as the entire span:
-/// `for pat in arg { body }` becomes `(pat, arg, body)`. Return `(pat, arg, body, span)`.
+/// `for pat in arg { body }` becomes `(pat, arg, body)`. Returns `(pat, arg, body, span)`.
pub struct ForLoop<'tcx> {
/// `for` loop item
pub pat: &'tcx hir::Pat<'tcx>,
@@ -138,6 +138,7 @@ impl<'hir> IfLet<'hir> {
}
/// An `if let` or `match` expression. Useful for lints that trigger on one or the other.
+#[derive(Debug)]
pub enum IfLetOrMatch<'hir> {
/// Any `match` expression
Match(&'hir Expr<'hir>, &'hir [Arm<'hir>], MatchSource),
@@ -264,7 +265,7 @@ impl<'a> Range<'a> {
}
}
-/// Represent the pre-expansion arguments of a `vec!` invocation.
+/// Represents the pre-expansion arguments of a `vec!` invocation.
pub enum VecArgs<'a> {
/// `vec![elem; len]`
Repeat(&'a hir::Expr<'a>, &'a hir::Expr<'a>),
@@ -398,7 +399,7 @@ impl<'hir> WhileLet<'hir> {
}
}
-/// Converts a hir binary operator to the corresponding `ast` type.
+/// Converts a `hir` binary operator to the corresponding `ast` type.
#[must_use]
pub fn binop(op: hir::BinOpKind) -> ast::BinOpKind {
match op {
@@ -436,7 +437,7 @@ pub enum VecInitKind {
WithExprCapacity(HirId),
}
-/// Checks if given expression is an initialization of `Vec` and returns its kind.
+/// Checks if the given expression is an initialization of `Vec` and returns its kind.
pub fn get_vec_init_kind<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Option<VecInitKind> {
if let ExprKind::Call(func, args) = expr.kind {
match func.kind {
diff --git a/src/tools/clippy/clippy_utils/src/hir_utils.rs b/src/tools/clippy/clippy_utils/src/hir_utils.rs
index 3e1d73564..fdc35cd4d 100644
--- a/src/tools/clippy/clippy_utils/src/hir_utils.rs
+++ b/src/tools/clippy/clippy_utils/src/hir_utils.rs
@@ -5,10 +5,9 @@ use crate::tokenize_with_text;
use rustc_ast::ast::InlineAsmTemplatePiece;
use rustc_data_structures::fx::FxHasher;
use rustc_hir::def::Res;
-use rustc_hir::HirIdMap;
use rustc_hir::{
ArrayLen, BinOpKind, BindingAnnotation, Block, BodyId, Closure, Expr, ExprField, ExprKind, FnRetTy, GenericArg,
- GenericArgs, Guard, HirId, InlineAsmOperand, Let, Lifetime, LifetimeName, Pat, PatField, PatKind, Path,
+ GenericArgs, Guard, HirId, HirIdMap, InlineAsmOperand, Let, Lifetime, LifetimeName, Pat, PatField, PatKind, Path,
PathSegment, PrimTy, QPath, Stmt, StmtKind, Ty, TyKind, TypeBinding,
};
use rustc_lexer::{tokenize, TokenKind};
@@ -253,15 +252,15 @@ impl HirEqInterExpr<'_, '_, '_> {
return false;
}
- if let Some((typeck_lhs, typeck_rhs)) = self.inner.maybe_typeck_results {
- if let (Some(l), Some(r)) = (
+ if let Some((typeck_lhs, typeck_rhs)) = self.inner.maybe_typeck_results
+ && typeck_lhs.expr_ty(left) == typeck_rhs.expr_ty(right)
+ && let (Some(l), Some(r)) = (
constant_simple(self.inner.cx, typeck_lhs, left),
constant_simple(self.inner.cx, typeck_rhs, right),
- ) {
- if l == r {
- return true;
- }
- }
+ )
+ && l == r
+ {
+ return true;
}
let is_eq = match (
@@ -300,7 +299,7 @@ impl HirEqInterExpr<'_, '_, '_> {
(&ExprKind::Field(l_f_exp, ref l_f_ident), &ExprKind::Field(r_f_exp, ref r_f_ident)) => {
l_f_ident.name == r_f_ident.name && self.eq_expr(l_f_exp, r_f_exp)
},
- (&ExprKind::Index(la, li), &ExprKind::Index(ra, ri)) => self.eq_expr(la, ra) && self.eq_expr(li, ri),
+ (&ExprKind::Index(la, li, _), &ExprKind::Index(ra, ri, _)) => self.eq_expr(la, ra) && self.eq_expr(li, ri),
(&ExprKind::If(lc, lt, ref le), &ExprKind::If(rc, rt, ref re)) => {
self.eq_expr(lc, rc) && self.eq_expr(lt, rt) && both(le, re, |l, r| self.eq_expr(l, r))
},
@@ -495,10 +494,13 @@ impl HirEqInterExpr<'_, '_, '_> {
loop {
use TokenKind::{BlockComment, LineComment, Whitespace};
if left_data.macro_def_id != right_data.macro_def_id
- || (matches!(left_data.kind, ExpnKind::Macro(MacroKind::Bang, name) if name == sym::cfg)
- && !eq_span_tokens(self.inner.cx, left_data.call_site, right_data.call_site, |t| {
- !matches!(t, Whitespace | LineComment { .. } | BlockComment { .. })
- }))
+ || (matches!(
+ left_data.kind,
+ ExpnKind::Macro(MacroKind::Bang, name)
+ if name == sym::cfg || name == sym::option_env
+ ) && !eq_span_tokens(self.inner.cx, left_data.call_site, right_data.call_site, |t| {
+ !matches!(t, Whitespace | LineComment { .. } | BlockComment { .. })
+ }))
{
// Either a different chain of macro calls, or different arguments to the `cfg` macro.
return false;
@@ -728,7 +730,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
self.hash_expr(e);
self.hash_name(f.name);
},
- ExprKind::Index(a, i) => {
+ ExprKind::Index(a, i, _) => {
self.hash_expr(a);
self.hash_expr(i);
},
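The change above extends the token-level comparison already used for `cfg!` to `option_env!`, so two invocations count as equal only when their arguments tokenise identically. A small runnable illustration of the two call sites such a check would treat as equivalent (the comparison itself lives in the lint infrastructure, not here):

```rust
fn main() {
    let a = option_env!("CARGO_PKG_NAME");
    let b = option_env!(/* same variable */ "CARGO_PKG_NAME");
    // Modulo whitespace and comments the two invocations have identical tokens,
    // which is the condition the updated equality check tests for.
    assert_eq!(a, b);
}
```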
diff --git a/src/tools/clippy/clippy_utils/src/lib.rs b/src/tools/clippy/clippy_utils/src/lib.rs
index 727b59f1f..6c4cec595 100644
--- a/src/tools/clippy/clippy_utils/src/lib.rs
+++ b/src/tools/clippy/clippy_utils/src/lib.rs
@@ -74,8 +74,7 @@ pub use self::hir_utils::{
use core::ops::ControlFlow;
use std::collections::hash_map::Entry;
use std::hash::BuildHasherDefault;
-use std::sync::OnceLock;
-use std::sync::{Mutex, MutexGuard};
+use std::sync::{Mutex, MutexGuard, OnceLock};
use if_chain::if_chain;
use itertools::Itertools;
@@ -84,41 +83,40 @@ use rustc_ast::Attribute;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::unhash::UnhashMap;
use rustc_hir::def::{DefKind, Res};
-use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE};
+use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LocalModDefId, LOCAL_CRATE};
use rustc_hir::hir_id::{HirIdMap, HirIdSet};
use rustc_hir::intravisit::{walk_expr, FnKind, Visitor};
-use rustc_hir::LangItem::{OptionNone, ResultErr, ResultOk};
+use rustc_hir::LangItem::{OptionNone, OptionSome, ResultErr, ResultOk};
use rustc_hir::{
self as hir, def, Arm, ArrayLen, BindingAnnotation, Block, BlockCheckMode, Body, Closure, Destination, Expr,
- ExprKind, FnDecl, HirId, Impl, ImplItem, ImplItemKind, ImplItemRef, IsAsync, Item, ItemKind, LangItem, Local,
- MatchSource, Mutability, Node, OwnerId, Param, Pat, PatKind, Path, PathSegment, PrimTy, QPath, Stmt, StmtKind,
- TraitItem, TraitItemRef, TraitRef, TyKind, UnOp,
+ ExprField, ExprKind, FnDecl, FnRetTy, GenericArgs, HirId, Impl, ImplItem, ImplItemKind, ImplItemRef, IsAsync, Item,
+ ItemKind, LangItem, Local, MatchSource, Mutability, Node, OwnerId, Param, Pat, PatKind, Path, PathSegment, PrimTy,
+ QPath, Stmt, StmtKind, TraitItem, TraitItemKind, TraitItemRef, TraitRef, TyKind, UnOp,
};
use rustc_lexer::{tokenize, TokenKind};
use rustc_lint::{LateContext, Level, Lint, LintContext};
use rustc_middle::hir::place::PlaceBase;
use rustc_middle::mir::ConstantKind;
-use rustc_middle::ty as rustc_ty;
use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow};
use rustc_middle::ty::binding::BindingMode;
-use rustc_middle::ty::fast_reject::SimplifiedType::{
- ArraySimplifiedType, BoolSimplifiedType, CharSimplifiedType, FloatSimplifiedType, IntSimplifiedType,
- PtrSimplifiedType, SliceSimplifiedType, StrSimplifiedType, UintSimplifiedType,
-};
+use rustc_middle::ty::fast_reject::SimplifiedType;
+use rustc_middle::ty::layout::IntegerExt;
use rustc_middle::ty::{
- layout::IntegerExt, BorrowKind, ClosureKind, Ty, TyCtxt, TypeAndMut, TypeVisitableExt, UpvarCapture,
+ self as rustc_ty, Binder, BorrowKind, ClosureKind, FloatTy, IntTy, ParamEnv, ParamEnvAnd, Ty, TyCtxt, TypeAndMut,
+ TypeVisitableExt, UintTy, UpvarCapture,
};
-use rustc_middle::ty::{FloatTy, IntTy, UintTy};
use rustc_span::hygiene::{ExpnKind, MacroKind};
use rustc_span::source_map::SourceMap;
-use rustc_span::sym;
use rustc_span::symbol::{kw, Ident, Symbol};
-use rustc_span::Span;
+use rustc_span::{sym, Span};
use rustc_target::abi::Integer;
use crate::consts::{constant, miri_to_const, Constant};
use crate::higher::Range;
-use crate::ty::{can_partially_move_ty, expr_sig, is_copy, is_recursively_primitive_type, ty_is_fn_once_param};
+use crate::ty::{
+ adt_and_variant_of_res, can_partially_move_ty, expr_sig, is_copy, is_recursively_primitive_type,
+ ty_is_fn_once_param,
+};
use crate::visitors::for_each_expr;
use rustc_middle::hir::nested_filter;
@@ -288,7 +286,7 @@ pub fn is_wild(pat: &Pat<'_>) -> bool {
/// Checks if the given `QPath` belongs to a type alias.
pub fn is_ty_alias(qpath: &QPath<'_>) -> bool {
match *qpath {
- QPath::Resolved(_, path) => matches!(path.res, Res::Def(DefKind::TyAlias | DefKind::AssocTy, ..)),
+ QPath::Resolved(_, path) => matches!(path.res, Res::Def(DefKind::TyAlias { .. } | DefKind::AssocTy, ..)),
QPath::TypeRelative(ty, _) if let TyKind::Path(qpath) = ty.kind => { is_ty_alias(&qpath) },
_ => false,
}
@@ -305,7 +303,7 @@ pub fn match_trait_method(cx: &LateContext<'_>, expr: &Expr<'_>, path: &[&str])
/// Checks if a method is defined in an impl of a diagnostic item
pub fn is_diag_item_method(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbol) -> bool {
if let Some(impl_did) = cx.tcx.impl_of_method(def_id) {
- if let Some(adt) = cx.tcx.type_of(impl_did).subst_identity().ty_adt_def() {
+ if let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def() {
return cx.tcx.is_diagnostic_item(diag_item, adt.did());
}
}
@@ -514,30 +512,30 @@ pub fn path_def_id<'tcx>(cx: &LateContext<'_>, maybe_path: &impl MaybePath<'tcx>
fn find_primitive_impls<'tcx>(tcx: TyCtxt<'tcx>, name: &str) -> impl Iterator<Item = DefId> + 'tcx {
let ty = match name {
- "bool" => BoolSimplifiedType,
- "char" => CharSimplifiedType,
- "str" => StrSimplifiedType,
- "array" => ArraySimplifiedType,
- "slice" => SliceSimplifiedType,
+ "bool" => SimplifiedType::Bool,
+ "char" => SimplifiedType::Char,
+ "str" => SimplifiedType::Str,
+ "array" => SimplifiedType::Array,
+ "slice" => SimplifiedType::Slice,
// FIXME: rustdoc documents these two using just `pointer`.
//
// Maybe this is something we should do here too.
- "const_ptr" => PtrSimplifiedType(Mutability::Not),
- "mut_ptr" => PtrSimplifiedType(Mutability::Mut),
- "isize" => IntSimplifiedType(IntTy::Isize),
- "i8" => IntSimplifiedType(IntTy::I8),
- "i16" => IntSimplifiedType(IntTy::I16),
- "i32" => IntSimplifiedType(IntTy::I32),
- "i64" => IntSimplifiedType(IntTy::I64),
- "i128" => IntSimplifiedType(IntTy::I128),
- "usize" => UintSimplifiedType(UintTy::Usize),
- "u8" => UintSimplifiedType(UintTy::U8),
- "u16" => UintSimplifiedType(UintTy::U16),
- "u32" => UintSimplifiedType(UintTy::U32),
- "u64" => UintSimplifiedType(UintTy::U64),
- "u128" => UintSimplifiedType(UintTy::U128),
- "f32" => FloatSimplifiedType(FloatTy::F32),
- "f64" => FloatSimplifiedType(FloatTy::F64),
+ "const_ptr" => SimplifiedType::Ptr(Mutability::Not),
+ "mut_ptr" => SimplifiedType::Ptr(Mutability::Mut),
+ "isize" => SimplifiedType::Int(IntTy::Isize),
+ "i8" => SimplifiedType::Int(IntTy::I8),
+ "i16" => SimplifiedType::Int(IntTy::I16),
+ "i32" => SimplifiedType::Int(IntTy::I32),
+ "i64" => SimplifiedType::Int(IntTy::I64),
+ "i128" => SimplifiedType::Int(IntTy::I128),
+ "usize" => SimplifiedType::Uint(UintTy::Usize),
+ "u8" => SimplifiedType::Uint(UintTy::U8),
+ "u16" => SimplifiedType::Uint(UintTy::U16),
+ "u32" => SimplifiedType::Uint(UintTy::U32),
+ "u64" => SimplifiedType::Uint(UintTy::U64),
+ "u128" => SimplifiedType::Uint(UintTy::U128),
+ "f32" => SimplifiedType::Float(FloatTy::F32),
+ "f64" => SimplifiedType::Float(FloatTy::F64),
_ => return [].iter().copied(),
};
@@ -737,7 +735,7 @@ fn projection_stack<'a, 'hir>(mut e: &'a Expr<'hir>) -> (Vec<&'a Expr<'hir>>, &'
let mut result = vec![];
let root = loop {
match e.kind {
- ExprKind::Index(ep, _) | ExprKind::Field(ep, _) => {
+ ExprKind::Index(ep, _, _) | ExprKind::Field(ep, _) => {
result.push(e);
e = ep;
},
@@ -784,7 +782,7 @@ pub fn can_mut_borrow_both(cx: &LateContext<'_>, e1: &Expr<'_>, e2: &Expr<'_>) -
return true;
}
},
- (ExprKind::Index(_, i1), ExprKind::Index(_, i2)) => {
+ (ExprKind::Index(_, i1, _), ExprKind::Index(_, i2, _)) => {
if !eq_expr_value(cx, i1, i2) {
return false;
}
@@ -812,7 +810,7 @@ fn is_default_equivalent_ctor(cx: &LateContext<'_>, def_id: DefId, path: &QPath<
if let QPath::TypeRelative(_, method) = path {
if method.ident.name == sym::new {
if let Some(impl_did) = cx.tcx.impl_of_method(def_id) {
- if let Some(adt) = cx.tcx.type_of(impl_did).subst_identity().ty_adt_def() {
+ if let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def() {
return std_types_symbols.iter().any(|&symbol| {
cx.tcx.is_diagnostic_item(symbol, adt.did()) || Some(adt.did()) == cx.tcx.lang_items().string()
});
@@ -823,7 +821,7 @@ fn is_default_equivalent_ctor(cx: &LateContext<'_>, def_id: DefId, path: &QPath<
false
}
-/// Return true if the expr is equal to `Default::default` when evaluated.
+/// Returns true if the expr is equal to `Default::default` when evaluated.
pub fn is_default_equivalent_call(cx: &LateContext<'_>, repl_func: &Expr<'_>) -> bool {
if_chain! {
if let hir::ExprKind::Path(ref repl_func_qpath) = repl_func.kind;
@@ -1377,7 +1375,7 @@ pub fn get_enclosing_loop_or_multi_call_closure<'tcx>(
.chain(args.iter())
.position(|arg| arg.hir_id == id)?;
let id = cx.typeck_results().type_dependent_def_id(e.hir_id)?;
- let ty = cx.tcx.fn_sig(id).subst_identity().skip_binder().inputs()[i];
+ let ty = cx.tcx.fn_sig(id).instantiate_identity().skip_binder().inputs()[i];
ty_is_fn_once_param(cx.tcx, ty, cx.tcx.param_env(id).caller_bounds()).then_some(())
},
_ => None,
@@ -1639,13 +1637,13 @@ pub fn is_direct_expn_of(span: Span, name: &str) -> Option<Span> {
/// Convenience function to get the return type of a function.
pub fn return_ty<'tcx>(cx: &LateContext<'tcx>, fn_def_id: hir::OwnerId) -> Ty<'tcx> {
- let ret_ty = cx.tcx.fn_sig(fn_def_id).subst_identity().output();
+ let ret_ty = cx.tcx.fn_sig(fn_def_id).instantiate_identity().output();
cx.tcx.erase_late_bound_regions(ret_ty)
}
/// Convenience function to get the nth argument type of a function.
pub fn nth_arg<'tcx>(cx: &LateContext<'tcx>, fn_def_id: hir::OwnerId, nth: usize) -> Ty<'tcx> {
- let arg = cx.tcx.fn_sig(fn_def_id).subst_identity().input(nth);
+ let arg = cx.tcx.fn_sig(fn_def_id).instantiate_identity().input(nth);
cx.tcx.erase_late_bound_regions(arg)
}
@@ -1767,7 +1765,7 @@ pub fn is_try<'tcx>(cx: &LateContext<'_>, expr: &'tcx Expr<'tcx>) -> Option<&'tc
if let ExprKind::Match(_, arms, ref source) = expr.kind {
// desugared from a `?` operator
- if *source == MatchSource::TryDesugar {
+ if let MatchSource::TryDesugar(_) = *source {
return Some(expr);
}
@@ -2372,11 +2370,11 @@ pub fn is_hir_ty_cfg_dependant(cx: &LateContext<'_>, ty: &hir::Ty<'_>) -> bool {
false
}
-static TEST_ITEM_NAMES_CACHE: OnceLock<Mutex<FxHashMap<LocalDefId, Vec<Symbol>>>> = OnceLock::new();
+static TEST_ITEM_NAMES_CACHE: OnceLock<Mutex<FxHashMap<LocalModDefId, Vec<Symbol>>>> = OnceLock::new();
-fn with_test_item_names(tcx: TyCtxt<'_>, module: LocalDefId, f: impl Fn(&[Symbol]) -> bool) -> bool {
+fn with_test_item_names(tcx: TyCtxt<'_>, module: LocalModDefId, f: impl Fn(&[Symbol]) -> bool) -> bool {
let cache = TEST_ITEM_NAMES_CACHE.get_or_init(|| Mutex::new(FxHashMap::default()));
- let mut map: MutexGuard<'_, FxHashMap<LocalDefId, Vec<Symbol>>> = cache.lock().unwrap();
+ let mut map: MutexGuard<'_, FxHashMap<LocalModDefId, Vec<Symbol>>> = cache.lock().unwrap();
let value = map.entry(module);
match value {
Entry::Occupied(entry) => f(entry.get()),
@@ -2385,7 +2383,7 @@ fn with_test_item_names(tcx: TyCtxt<'_>, module: LocalDefId, f: impl Fn(&[Symbol
for id in tcx.hir().module_items(module) {
if matches!(tcx.def_kind(id.owner_id), DefKind::Const)
&& let item = tcx.hir().item(id)
- && let ItemKind::Const(ty, _body) = item.kind {
+ && let ItemKind::Const(ty, _generics, _body) = item.kind {
if let TyKind::Path(QPath::Resolved(_, path)) = ty.kind {
// We could also check for the type name `test::TestDescAndFn`
if let Res::Def(DefKind::Struct, _) = path.res {
@@ -2450,6 +2448,17 @@ pub fn is_in_cfg_test(tcx: TyCtxt<'_>, id: hir::HirId) -> bool {
.any(is_cfg_test)
}
+/// Checks if the item or any of its parents has a `#[cfg(...)]` attribute applied.
+pub fn inherits_cfg(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
+ let hir = tcx.hir();
+
+ tcx.has_attr(def_id, sym::cfg)
+ || hir
+ .parent_iter(hir.local_def_id_to_hir_id(def_id))
+ .flat_map(|(parent_id, _)| hir.attrs(parent_id))
+ .any(|attr| attr.has_name(sym::cfg))
+}
+
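The new `inherits_cfg` helper reports whether an item or one of its ancestors carries any `#[cfg(...)]` attribute. A minimal sketch of the situation it is meant to detect (module and function names are made up):

```rust
#[cfg(test)]
mod gated {
    // `helper` has no attribute of its own, but it inherits the `#[cfg(test)]`
    // from its parent module, which is the case `inherits_cfg` reports.
    pub fn helper() -> u32 {
        42
    }
}

fn main() {
    #[cfg(test)]
    println!("{}", gated::helper());
}
```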
/// Checks whether item either has `test` attribute applied, or
/// is a module with `test` in its name.
///
@@ -2504,6 +2513,262 @@ pub fn walk_to_expr_usage<'tcx, T>(
None
}
+/// A type definition as it would be viewed from within a function.
+#[derive(Clone, Copy)]
+pub enum DefinedTy<'tcx> {
+ /// Used for locals and closures defined within the function.
+ Hir(&'tcx hir::Ty<'tcx>),
+ /// Used for function signatures, and constant and static values. This includes the `ParamEnv`
+ /// from the definition site.
+ Mir(ParamEnvAnd<'tcx, Binder<'tcx, Ty<'tcx>>>),
+}
+
+/// The context an expression's value is used in.
+pub struct ExprUseCtxt<'tcx> {
+ /// The parent node which consumes the value.
+ pub node: ExprUseNode<'tcx>,
+ /// Any adjustments applied to the type.
+ pub adjustments: &'tcx [Adjustment<'tcx>],
+ /// Whether or not the type must unify with another code path.
+ pub is_ty_unified: bool,
+ /// Whether or not the value will be moved before it's used.
+ pub moved_before_use: bool,
+}
+
+/// The node which consumes a value.
+pub enum ExprUseNode<'tcx> {
+ /// Assignment to, or initializer for, a local
+ Local(&'tcx Local<'tcx>),
+ /// Initializer for a const or static item.
+ ConstStatic(OwnerId),
+ /// Implicit or explicit return from a function.
+ Return(OwnerId),
+ /// Initialization of a struct field.
+ Field(&'tcx ExprField<'tcx>),
+ /// An argument to a function.
+ FnArg(&'tcx Expr<'tcx>, usize),
+ /// An argument to a method.
+ MethodArg(HirId, Option<&'tcx GenericArgs<'tcx>>, usize),
+ /// The callee of a function call.
+ Callee,
+ /// Access of a field.
+ FieldAccess(Ident),
+}
+impl<'tcx> ExprUseNode<'tcx> {
+ /// Checks if the value is returned from the function.
+ pub fn is_return(&self) -> bool {
+ matches!(self, Self::Return(_))
+ }
+
+ /// Checks if the value is used as a method call receiver.
+ pub fn is_recv(&self) -> bool {
+ matches!(self, Self::MethodArg(_, _, 0))
+ }
+
+ /// Gets the needed type as it's defined without any type inference.
+ pub fn defined_ty(&self, cx: &LateContext<'tcx>) -> Option<DefinedTy<'tcx>> {
+ match *self {
+ Self::Local(Local { ty: Some(ty), .. }) => Some(DefinedTy::Hir(ty)),
+ Self::ConstStatic(id) => Some(DefinedTy::Mir(
+ cx.param_env
+ .and(Binder::dummy(cx.tcx.type_of(id).instantiate_identity())),
+ )),
+ Self::Return(id) => {
+ let hir_id = cx.tcx.hir().local_def_id_to_hir_id(id.def_id);
+ if let Some(Node::Expr(Expr {
+ kind: ExprKind::Closure(c),
+ ..
+ })) = cx.tcx.hir().find(hir_id)
+ {
+ match c.fn_decl.output {
+ FnRetTy::DefaultReturn(_) => None,
+ FnRetTy::Return(ty) => Some(DefinedTy::Hir(ty)),
+ }
+ } else {
+ Some(DefinedTy::Mir(
+ cx.param_env.and(cx.tcx.fn_sig(id).instantiate_identity().output()),
+ ))
+ }
+ },
+ Self::Field(field) => match get_parent_expr_for_hir(cx, field.hir_id) {
+ Some(Expr {
+ hir_id,
+ kind: ExprKind::Struct(path, ..),
+ ..
+ }) => adt_and_variant_of_res(cx, cx.qpath_res(path, *hir_id))
+ .and_then(|(adt, variant)| {
+ variant
+ .fields
+ .iter()
+ .find(|f| f.name == field.ident.name)
+ .map(|f| (adt, f))
+ })
+ .map(|(adt, field_def)| {
+ DefinedTy::Mir(
+ cx.tcx
+ .param_env(adt.did())
+ .and(Binder::dummy(cx.tcx.type_of(field_def.did).instantiate_identity())),
+ )
+ }),
+ _ => None,
+ },
+ Self::FnArg(callee, i) => {
+ let sig = expr_sig(cx, callee)?;
+ let (hir_ty, ty) = sig.input_with_hir(i)?;
+ Some(match hir_ty {
+ Some(hir_ty) => DefinedTy::Hir(hir_ty),
+ None => DefinedTy::Mir(
+ sig.predicates_id()
+ .map_or(ParamEnv::empty(), |id| cx.tcx.param_env(id))
+ .and(ty),
+ ),
+ })
+ },
+ Self::MethodArg(id, _, i) => {
+ let id = cx.typeck_results().type_dependent_def_id(id)?;
+ let sig = cx.tcx.fn_sig(id).skip_binder();
+ Some(DefinedTy::Mir(cx.tcx.param_env(id).and(sig.input(i))))
+ },
+ Self::Local(_) | Self::FieldAccess(..) | Self::Callee => None,
+ }
+ }
+}
+
+/// Gets the context an expression's value is used in.
+#[expect(clippy::too_many_lines)]
+pub fn expr_use_ctxt<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'tcx>) -> Option<ExprUseCtxt<'tcx>> {
+ let mut adjustments = [].as_slice();
+ let mut is_ty_unified = false;
+ let mut moved_before_use = false;
+ let ctxt = e.span.ctxt();
+ walk_to_expr_usage(cx, e, &mut |parent, child_id| {
+ // LocalTableInContext returns the wrong lifetime, so use `expr_adjustments` instead.
+ if adjustments.is_empty() && let Node::Expr(e) = cx.tcx.hir().get(child_id) {
+ adjustments = cx.typeck_results().expr_adjustments(e);
+ }
+ match parent {
+ Node::Local(l) if l.span.ctxt() == ctxt => Some(ExprUseCtxt {
+ node: ExprUseNode::Local(l),
+ adjustments,
+ is_ty_unified,
+ moved_before_use,
+ }),
+ Node::Item(&Item {
+ kind: ItemKind::Static(..) | ItemKind::Const(..),
+ owner_id,
+ span,
+ ..
+ })
+ | Node::TraitItem(&TraitItem {
+ kind: TraitItemKind::Const(..),
+ owner_id,
+ span,
+ ..
+ })
+ | Node::ImplItem(&ImplItem {
+ kind: ImplItemKind::Const(..),
+ owner_id,
+ span,
+ ..
+ }) if span.ctxt() == ctxt => Some(ExprUseCtxt {
+ node: ExprUseNode::ConstStatic(owner_id),
+ adjustments,
+ is_ty_unified,
+ moved_before_use,
+ }),
+
+ Node::Item(&Item {
+ kind: ItemKind::Fn(..),
+ owner_id,
+ span,
+ ..
+ })
+ | Node::TraitItem(&TraitItem {
+ kind: TraitItemKind::Fn(..),
+ owner_id,
+ span,
+ ..
+ })
+ | Node::ImplItem(&ImplItem {
+ kind: ImplItemKind::Fn(..),
+ owner_id,
+ span,
+ ..
+ }) if span.ctxt() == ctxt => Some(ExprUseCtxt {
+ node: ExprUseNode::Return(owner_id),
+ adjustments,
+ is_ty_unified,
+ moved_before_use,
+ }),
+
+ Node::ExprField(field) if field.span.ctxt() == ctxt => Some(ExprUseCtxt {
+ node: ExprUseNode::Field(field),
+ adjustments,
+ is_ty_unified,
+ moved_before_use,
+ }),
+
+ Node::Expr(parent) if parent.span.ctxt() == ctxt => match parent.kind {
+ ExprKind::Ret(_) => Some(ExprUseCtxt {
+ node: ExprUseNode::Return(OwnerId {
+ def_id: cx.tcx.hir().body_owner_def_id(cx.enclosing_body.unwrap()),
+ }),
+ adjustments,
+ is_ty_unified,
+ moved_before_use,
+ }),
+ ExprKind::Closure(closure) => Some(ExprUseCtxt {
+ node: ExprUseNode::Return(OwnerId { def_id: closure.def_id }),
+ adjustments,
+ is_ty_unified,
+ moved_before_use,
+ }),
+ ExprKind::Call(func, args) => Some(ExprUseCtxt {
+ node: match args.iter().position(|arg| arg.hir_id == child_id) {
+ Some(i) => ExprUseNode::FnArg(func, i),
+ None => ExprUseNode::Callee,
+ },
+ adjustments,
+ is_ty_unified,
+ moved_before_use,
+ }),
+ ExprKind::MethodCall(name, _, args, _) => Some(ExprUseCtxt {
+ node: ExprUseNode::MethodArg(
+ parent.hir_id,
+ name.args,
+ args.iter().position(|arg| arg.hir_id == child_id).map_or(0, |i| i + 1),
+ ),
+ adjustments,
+ is_ty_unified,
+ moved_before_use,
+ }),
+ ExprKind::Field(child, name) if child.hir_id == e.hir_id => Some(ExprUseCtxt {
+ node: ExprUseNode::FieldAccess(name),
+ adjustments,
+ is_ty_unified,
+ moved_before_use,
+ }),
+ ExprKind::If(e, _, _) | ExprKind::Match(e, _, _) if e.hir_id != child_id => {
+ is_ty_unified = true;
+ moved_before_use = true;
+ None
+ },
+ ExprKind::Block(_, Some(_)) | ExprKind::Break(..) => {
+ is_ty_unified = true;
+ moved_before_use = true;
+ None
+ },
+ ExprKind::Block(..) => {
+ moved_before_use = true;
+ None
+ },
+ _ => None,
+ },
+ _ => None,
+ }
+ })
+}
+
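The `ExprUseNode` variants added above classify the position in which an expression's value is consumed. The following stand-alone sketch (all names invented) marks in comments roughly which variant a lint would see for each marked expression:

```rust
struct Point {
    x: i32,
}

fn make() -> i32 {
    2
}

fn takes(v: i32) -> i32 {
    v + 1
}

fn demo() -> i32 {
    let a = make();              // `make()` is consumed as a `Local` initialiser
    let p = Point { x: make() }; // the inner `make()` is a `Field` use
    let b = takes(a);            // `a` is a `FnArg` use (argument 0 of `takes`)
    let s = b.to_string();       // `b` is a `MethodArg(.., 0)` use, i.e. the receiver
    s.len() as i32 + p.x         // `p` is a `FieldAccess(x)` use; the sum is a `Return` use
}

fn main() {
    assert_eq!(demo(), 3);
}
```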
/// Tokenizes the input while keeping the text associated with each token.
pub fn tokenize_with_text(s: &str) -> impl Iterator<Item = (TokenKind, &str)> {
let mut pos = 0;
@@ -2518,7 +2783,9 @@ pub fn tokenize_with_text(s: &str) -> impl Iterator<Item = (TokenKind, &str)> {
/// Checks whether a given span has any comment token
/// This checks for all types of comment: line "//", block "/**", doc "///" "//!"
pub fn span_contains_comment(sm: &SourceMap, span: Span) -> bool {
- let Ok(snippet) = sm.span_to_snippet(span) else { return false };
+ let Ok(snippet) = sm.span_to_snippet(span) else {
+ return false;
+ };
return tokenize(&snippet).any(|token| {
matches!(
token.kind,
@@ -2527,7 +2794,8 @@ pub fn span_contains_comment(sm: &SourceMap, span: Span) -> bool {
});
}
-/// Return all the comments a given span contains
+/// Returns all the comments a given span contains
+///
/// Comments are returned wrapped with their relevant delimiters
pub fn span_extract_comment(sm: &SourceMap, span: Span) -> String {
let snippet = sm.span_to_snippet(span).unwrap_or_default();
@@ -2542,6 +2810,50 @@ pub fn span_find_starting_semi(sm: &SourceMap, span: Span) -> Span {
sm.span_take_while(span, |&ch| ch == ' ' || ch == ';')
}
+/// Returns whether the given let pattern and else body can be turned into a question mark
+///
+/// For this example:
+/// ```ignore
+/// let FooBar { a, b } = if let Some(a) = ex { a } else { return None };
+/// ```
+/// We get as parameters:
+/// ```ignore
+/// pat: Some(a)
+/// else_body: return None
+/// ```
+///
+/// And for this example:
+/// ```ignore
+/// let Some(FooBar { a, b }) = ex else { return None };
+/// ```
+/// We get as parameters:
+/// ```ignore
+/// pat: Some(FooBar { a, b })
+/// else_body: return None
+/// ```
+///
+/// We output `Some(a)` in the first instance, and `Some(FooBar { a, b })` in the second, because
+/// the question mark operator is applicable here. Callers have to check whether we are in a
+/// constant or not.
+pub fn pat_and_expr_can_be_question_mark<'a, 'hir>(
+ cx: &LateContext<'_>,
+ pat: &'a Pat<'hir>,
+ else_body: &Expr<'_>,
+) -> Option<&'a Pat<'hir>> {
+ if let PatKind::TupleStruct(pat_path, [inner_pat], _) = pat.kind &&
+ is_res_lang_ctor(cx, cx.qpath_res(&pat_path, pat.hir_id), OptionSome) &&
+ !is_refutable(cx, inner_pat) &&
+ let else_body = peel_blocks(else_body) &&
+ let ExprKind::Ret(Some(ret_val)) = else_body.kind &&
+ let ExprKind::Path(ret_path) = ret_val.kind &&
+ is_res_lang_ctor(cx, cx.qpath_res(&ret_path, ret_val.hir_id), OptionNone)
+ {
+ Some(inner_pat)
+ } else {
+ None
+ }
+}
+
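A self-contained before/after illustration of the shape `pat_and_expr_can_be_question_mark` recognises (the pattern is `Some(c)` and the else body is `return None`), together with the `?`-based form a lint could suggest; the function names are invented:

```rust
// Before: the shape the helper recognises.
fn first_char_verbose(s: &str) -> Option<char> {
    let Some(c) = s.chars().next() else { return None };
    Some(c.to_ascii_uppercase())
}

// After: the equivalent using the question mark operator.
fn first_char_concise(s: &str) -> Option<char> {
    let c = s.chars().next()?;
    Some(c.to_ascii_uppercase())
}

fn main() {
    assert_eq!(first_char_verbose("abc"), first_char_concise("abc"));
    assert_eq!(first_char_verbose(""), None);
}
```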
macro_rules! op_utils {
($($name:ident $assign:ident)*) => {
/// Binary operation traits like `LangItem::Add`
diff --git a/src/tools/clippy/clippy_utils/src/macros.rs b/src/tools/clippy/clippy_utils/src/macros.rs
index 00f3abaec..173f9841d 100644
--- a/src/tools/clippy/clippy_utils/src/macros.rs
+++ b/src/tools/clippy/clippy_utils/src/macros.rs
@@ -192,7 +192,9 @@ pub fn first_node_in_macro(cx: &LateContext<'_>, node: &impl HirNode) -> Option<
/// Is `def_id` of `std::panic`, `core::panic` or any inner implementation macros
pub fn is_panic(cx: &LateContext<'_>, def_id: DefId) -> bool {
- let Some(name) = cx.tcx.get_diagnostic_name(def_id) else { return false };
+ let Some(name) = cx.tcx.get_diagnostic_name(def_id) else {
+ return false;
+ };
matches!(
name,
sym::core_panic_macro
@@ -205,7 +207,9 @@ pub fn is_panic(cx: &LateContext<'_>, def_id: DefId) -> bool {
/// Is `def_id` of `assert!` or `debug_assert!`
pub fn is_assert_macro(cx: &LateContext<'_>, def_id: DefId) -> bool {
- let Some(name) = cx.tcx.get_diagnostic_name(def_id) else { return false };
+ let Some(name) = cx.tcx.get_diagnostic_name(def_id) else {
+ return false;
+ };
matches!(name, sym::assert_macro | sym::debug_assert_macro)
}
@@ -223,13 +227,19 @@ pub enum PanicExpn<'a> {
impl<'a> PanicExpn<'a> {
pub fn parse(expr: &'a Expr<'a>) -> Option<Self> {
- let ExprKind::Call(callee, [arg, rest @ ..]) = &expr.kind else { return None };
- let ExprKind::Path(QPath::Resolved(_, path)) = &callee.kind else { return None };
+ let ExprKind::Call(callee, [arg, rest @ ..]) = &expr.kind else {
+ return None;
+ };
+ let ExprKind::Path(QPath::Resolved(_, path)) = &callee.kind else {
+ return None;
+ };
let result = match path.segments.last().unwrap().ident.as_str() {
"panic" if arg.span.ctxt() == expr.span.ctxt() => Self::Empty,
"panic" | "panic_str" => Self::Str(arg),
"panic_display" => {
- let ExprKind::AddrOf(_, _, e) = &arg.kind else { return None };
+ let ExprKind::AddrOf(_, _, e) = &arg.kind else {
+ return None;
+ };
Self::Display(e)
},
"panic_fmt" => Self::Format(arg),
diff --git a/src/tools/clippy/clippy_utils/src/mir/possible_borrower.rs b/src/tools/clippy/clippy_utils/src/mir/possible_borrower.rs
index 920ce8e65..703985b9d 100644
--- a/src/tools/clippy/clippy_utils/src/mir/possible_borrower.rs
+++ b/src/tools/clippy/clippy_utils/src/mir/possible_borrower.rs
@@ -1,11 +1,15 @@
-use super::{possible_origin::PossibleOriginVisitor, transitive_relation::TransitiveRelation};
+use super::possible_origin::PossibleOriginVisitor;
+use super::transitive_relation::TransitiveRelation;
use crate::ty::is_copy;
use rustc_data_structures::fx::FxHashMap;
use rustc_index::bit_set::{BitSet, HybridBitSet};
use rustc_lint::LateContext;
-use rustc_middle::mir::{self, visit::Visitor as _, Mutability};
-use rustc_middle::ty::{self, visit::TypeVisitor, TyCtxt};
-use rustc_mir_dataflow::{impls::MaybeStorageLive, Analysis, ResultsCursor};
+use rustc_middle::mir::visit::Visitor as _;
+use rustc_middle::mir::{self, Mutability};
+use rustc_middle::ty::visit::TypeVisitor;
+use rustc_middle::ty::{self, TyCtxt};
+use rustc_mir_dataflow::impls::MaybeStorageLive;
+use rustc_mir_dataflow::{Analysis, ResultsCursor};
use std::borrow::Cow;
use std::ops::ControlFlow;
diff --git a/src/tools/clippy/clippy_utils/src/mir/possible_origin.rs b/src/tools/clippy/clippy_utils/src/mir/possible_origin.rs
index 8e7513d74..da0426686 100644
--- a/src/tools/clippy/clippy_utils/src/mir/possible_origin.rs
+++ b/src/tools/clippy/clippy_utils/src/mir/possible_origin.rs
@@ -44,7 +44,7 @@ impl<'a, 'tcx> mir::visit::Visitor<'tcx> for PossibleOriginVisitor<'a, 'tcx> {
let lhs = place.local;
match rvalue {
// Only consider `&mut`, which can modify origin place
- mir::Rvalue::Ref(_, rustc_middle::mir::BorrowKind::Mut { .. }, borrowed) |
+ mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, borrowed) |
// _2: &mut _;
// _3 = move _2
mir::Rvalue::Use(mir::Operand::Move(borrowed)) |
diff --git a/src/tools/clippy/clippy_utils/src/paths.rs b/src/tools/clippy/clippy_utils/src/paths.rs
index 0e6f01287..914ea85ac 100644
--- a/src/tools/clippy/clippy_utils/src/paths.rs
+++ b/src/tools/clippy/clippy_utils/src/paths.rs
@@ -94,12 +94,12 @@ pub const PUSH_STR: [&str; 4] = ["alloc", "string", "String", "push_str"];
pub const RANGE_ARGUMENT_TRAIT: [&str; 3] = ["core", "ops", "RangeBounds"];
pub const REFCELL_REF: [&str; 3] = ["core", "cell", "Ref"];
pub const REFCELL_REFMUT: [&str; 3] = ["core", "cell", "RefMut"];
-pub const REGEX_BUILDER_NEW: [&str; 5] = ["regex", "re_builder", "unicode", "RegexBuilder", "new"];
-pub const REGEX_BYTES_BUILDER_NEW: [&str; 5] = ["regex", "re_builder", "bytes", "RegexBuilder", "new"];
-pub const REGEX_BYTES_NEW: [&str; 4] = ["regex", "re_bytes", "Regex", "new"];
-pub const REGEX_BYTES_SET_NEW: [&str; 5] = ["regex", "re_set", "bytes", "RegexSet", "new"];
-pub const REGEX_NEW: [&str; 4] = ["regex", "re_unicode", "Regex", "new"];
-pub const REGEX_SET_NEW: [&str; 5] = ["regex", "re_set", "unicode", "RegexSet", "new"];
+pub const REGEX_BUILDER_NEW: [&str; 3] = ["regex", "RegexBuilder", "new"];
+pub const REGEX_BYTES_BUILDER_NEW: [&str; 4] = ["regex", "bytes", "RegexBuilder", "new"];
+pub const REGEX_BYTES_NEW: [&str; 4] = ["regex", "bytes", "Regex", "new"];
+pub const REGEX_BYTES_SET_NEW: [&str; 4] = ["regex", "bytes", "RegexSet", "new"];
+pub const REGEX_NEW: [&str; 3] = ["regex", "Regex", "new"];
+pub const REGEX_SET_NEW: [&str; 3] = ["regex", "RegexSet", "new"];
pub const SERDE_DESERIALIZE: [&str; 3] = ["serde", "de", "Deserialize"];
pub const SERDE_DE_VISITOR: [&str; 3] = ["serde", "de", "Visitor"];
pub const SLICE_FROM_RAW_PARTS: [&str; 4] = ["core", "slice", "raw", "from_raw_parts"];
@@ -149,6 +149,7 @@ pub const VEC_AS_SLICE: [&str; 4] = ["alloc", "vec", "Vec", "as_slice"];
pub const VEC_DEQUE_ITER: [&str; 5] = ["alloc", "collections", "vec_deque", "VecDeque", "iter"];
pub const VEC_FROM_ELEM: [&str; 3] = ["alloc", "vec", "from_elem"];
pub const VEC_NEW: [&str; 4] = ["alloc", "vec", "Vec", "new"];
+pub const VEC_WITH_CAPACITY: [&str; 4] = ["alloc", "vec", "Vec", "with_capacity"];
pub const VEC_RESIZE: [&str; 4] = ["alloc", "vec", "Vec", "resize"];
pub const WEAK_ARC: [&str; 3] = ["alloc", "sync", "Weak"];
pub const WEAK_RC: [&str; 3] = ["alloc", "rc", "Weak"];
@@ -161,3 +162,6 @@ pub const OPTION_UNWRAP: [&str; 4] = ["core", "option", "Option", "unwrap"];
pub const OPTION_EXPECT: [&str; 4] = ["core", "option", "Option", "expect"];
pub const FORMATTER: [&str; 3] = ["core", "fmt", "Formatter"];
pub const DEBUG_STRUCT: [&str; 4] = ["core", "fmt", "builders", "DebugStruct"];
+pub const ORD_CMP: [&str; 4] = ["core", "cmp", "Ord", "cmp"];
+#[expect(clippy::invalid_paths)] // not sure why it thinks this, it works so
+pub const BOOL_THEN: [&str; 4] = ["core", "bool", "<impl bool>", "then"];
diff --git a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs
index fbf4ab272..139e31bc5 100644
--- a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs
+++ b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs
@@ -14,10 +14,9 @@ use rustc_middle::mir::{
Body, CastKind, NonDivergingIntrinsic, NullOp, Operand, Place, ProjectionElem, Rvalue, Statement, StatementKind,
Terminator, TerminatorKind,
};
-use rustc_middle::traits::{ImplSource, ObligationCause};
-use rustc_middle::ty::subst::GenericArgKind;
-use rustc_middle::ty::{self, adjustment::PointerCoercion, Ty, TyCtxt};
-use rustc_middle::ty::{BoundConstness, TraitRef};
+use rustc_middle::traits::{BuiltinImplSource, ImplSource, ObligationCause};
+use rustc_middle::ty::adjustment::PointerCoercion;
+use rustc_middle::ty::{self, GenericArgKind, TraitRef, Ty, TyCtxt};
use rustc_semver::RustcVersion;
use rustc_span::symbol::sym;
use rustc_span::Span;
@@ -35,7 +34,7 @@ pub fn is_min_const_fn<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, msrv: &Msrv)
// impl trait is gone in MIR, so check the return type manually
check_ty(
tcx,
- tcx.fn_sig(def_id).subst_identity().output().skip_binder(),
+ tcx.fn_sig(def_id).instantiate_identity().output().skip_binder(),
body.local_decls.iter().next().unwrap().source_info.span,
)?;
@@ -125,7 +124,9 @@ fn check_rvalue<'tcx>(
) => check_operand(tcx, operand, span, body),
Rvalue::Cast(
CastKind::PointerCoercion(
- PointerCoercion::UnsafeFnPointer | PointerCoercion::ClosureFnPointer(_) | PointerCoercion::ReifyFnPointer,
+ PointerCoercion::UnsafeFnPointer
+ | PointerCoercion::ClosureFnPointer(_)
+ | PointerCoercion::ReifyFnPointer,
),
_,
_,
@@ -390,32 +391,39 @@ fn is_const_fn(tcx: TyCtxt<'_>, def_id: DefId, msrv: &Msrv) -> bool {
#[expect(clippy::similar_names)] // bit too pedantic
fn is_ty_const_destruct<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, body: &Body<'tcx>) -> bool {
- // Avoid selecting for simple cases, such as builtin types.
- if ty::util::is_trivially_const_drop(ty) {
- return true;
- }
+ // FIXME(effects, fee1-dead) revert to const destruct once it works again
+ #[expect(unused)]
+ fn is_ty_const_destruct_unused<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, body: &Body<'tcx>) -> bool {
+ // Avoid selecting for simple cases, such as builtin types.
+ if ty::util::is_trivially_const_drop(ty) {
+ return true;
+ }
- let obligation = Obligation::new(
- tcx,
- ObligationCause::dummy_with_span(body.span),
- ConstCx::new(tcx, body).param_env.with_const(),
- TraitRef::from_lang_item(tcx, LangItem::Destruct, body.span, [ty]).with_constness(BoundConstness::ConstIfConst),
- );
+ // FIXME(effects) constness
+ let obligation = Obligation::new(
+ tcx,
+ ObligationCause::dummy_with_span(body.span),
+ ConstCx::new(tcx, body).param_env,
+ TraitRef::from_lang_item(tcx, LangItem::Destruct, body.span, [ty]),
+ );
- let infcx = tcx.infer_ctxt().build();
- let mut selcx = SelectionContext::new(&infcx);
- let Some(impl_src) = selcx.select(&obligation).ok().flatten() else {
- return false;
- };
+ let infcx = tcx.infer_ctxt().build();
+ let mut selcx = SelectionContext::new(&infcx);
+ let Some(impl_src) = selcx.select(&obligation).ok().flatten() else {
+ return false;
+ };
+
+ if !matches!(
+ impl_src,
+ ImplSource::Builtin(BuiltinImplSource::Misc, _) | ImplSource::Param(_)
+ ) {
+ return false;
+ }
- if !matches!(
- impl_src,
- ImplSource::Builtin(_) | ImplSource::Param(_, ty::BoundConstness::ConstIfConst)
- ) {
- return false;
+ let ocx = ObligationCtxt::new(&infcx);
+ ocx.register_obligations(impl_src.nested_obligations());
+ ocx.select_all_or_error().is_empty()
}
- let ocx = ObligationCtxt::new(&infcx);
- ocx.register_obligations(impl_src.nested_obligations());
- ocx.select_all_or_error().is_empty()
+ !ty.needs_drop(tcx, ConstCx::new(tcx, body).param_env)
}
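The rewritten fallback above sidesteps const-`Destruct` trait selection and simply asks whether the type needs drop glue. A quick runnable illustration of that distinction, using the stable `std::mem::needs_drop` query rather than compiler internals:

```rust
fn main() {
    // Types without drop glue pass the new check.
    assert!(!std::mem::needs_drop::<u32>());
    assert!(!std::mem::needs_drop::<(u8, bool)>());
    // Types owning heap allocations need drop glue and are rejected.
    assert!(std::mem::needs_drop::<String>());
    assert!(std::mem::needs_drop::<Vec<u8>>());
}
```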
diff --git a/src/tools/clippy/clippy_utils/src/source.rs b/src/tools/clippy/clippy_utils/src/source.rs
index 582337b47..dc4ee7256 100644
--- a/src/tools/clippy/clippy_utils/src/source.rs
+++ b/src/tools/clippy/clippy_utils/src/source.rs
@@ -8,8 +8,7 @@ use rustc_hir::{BlockCheckMode, Expr, ExprKind, UnsafeSource};
use rustc_lint::{LateContext, LintContext};
use rustc_session::Session;
use rustc_span::source_map::{original_sp, SourceMap};
-use rustc_span::{hygiene, SourceFile};
-use rustc_span::{BytePos, Pos, Span, SpanData, SyntaxContext, DUMMY_SP};
+use rustc_span::{hygiene, BytePos, Pos, SourceFile, Span, SpanData, SyntaxContext, DUMMY_SP};
use std::borrow::Cow;
use std::ops::Range;
diff --git a/src/tools/clippy/clippy_utils/src/sugg.rs b/src/tools/clippy/clippy_utils/src/sugg.rs
index cf781e18c..ee5a49a20 100644
--- a/src/tools/clippy/clippy_utils/src/sugg.rs
+++ b/src/tools/clippy/clippy_utils/src/sugg.rs
@@ -395,7 +395,7 @@ fn binop_to_string(op: AssocOp, lhs: &str, rhs: &str) -> String {
}
}
-/// Return `true` if `sugg` is enclosed in parenthesis.
+/// Returns `true` if `sugg` is enclosed in parenthesis.
pub fn has_enclosing_paren(sugg: impl AsRef<str>) -> bool {
let mut chars = sugg.as_ref().chars();
if chars.next() == Some('(') {
@@ -877,7 +877,7 @@ impl<'tcx> DerefDelegate<'_, 'tcx> {
.cx
.typeck_results()
.type_dependent_def_id(parent_expr.hir_id)
- .map(|did| self.cx.tcx.fn_sig(did).subst_identity().skip_binder())
+ .map(|did| self.cx.tcx.fn_sig(did).instantiate_identity().skip_binder())
{
std::iter::once(receiver)
.chain(call_args.iter())
@@ -1010,7 +1010,9 @@ impl<'tcx> Delegate<'tcx> for DerefDelegate<'_, 'tcx> {
projections_handled = true;
},
// note: unable to trigger `Subslice` kind in tests
- ProjectionKind::Subslice => (),
+ ProjectionKind::Subslice |
+ // Doesn't have surface syntax. Only occurs in patterns.
+ ProjectionKind::OpaqueCast => (),
ProjectionKind::Deref => {
// Explicit derefs are typically handled later on, but
// some items do not need explicit deref, such as array accesses,
diff --git a/src/tools/clippy/clippy_utils/src/ty.rs b/src/tools/clippy/clippy_utils/src/ty.rs
index d650cbe0b..a05f682aa 100644
--- a/src/tools/clippy/clippy_utils/src/ty.rs
+++ b/src/tools/clippy/clippy_utils/src/ty.rs
@@ -3,33 +3,37 @@
#![allow(clippy::module_name_repetitions)]
use core::ops::ControlFlow;
+use itertools::Itertools;
use rustc_ast::ast::Mutability;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir as hir;
use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
use rustc_hir::def_id::DefId;
use rustc_hir::{Expr, FnDecl, LangItem, TyKind, Unsafety};
-use rustc_infer::infer::{
- type_variable::{TypeVariableOrigin, TypeVariableOriginKind},
- TyCtxtInferExt,
-};
+use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
+use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::LateContext;
use rustc_middle::mir::interpret::{ConstValue, Scalar};
+use rustc_middle::traits::EvaluationResult;
+use rustc_middle::ty::layout::ValidityRequirement;
use rustc_middle::ty::{
- self, layout::ValidityRequirement, AdtDef, AliasTy, AssocKind, Binder, BoundRegion, FnSig, IntTy, List, ParamEnv,
- Region, RegionKind, SubstsRef, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor,
- UintTy, VariantDef, VariantDiscr,
+ self, AdtDef, AliasTy, AssocKind, Binder, BoundRegion, FnSig, GenericArg, GenericArgKind, GenericArgsRef,
+ GenericParamDefKind, IntTy, List, ParamEnv, Region, RegionKind, ToPredicate, TraitRef, Ty, TyCtxt,
+ TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, UintTy, VariantDef, VariantDiscr,
};
-use rustc_middle::ty::{GenericArg, GenericArgKind};
use rustc_span::symbol::Ident;
use rustc_span::{sym, Span, Symbol, DUMMY_SP};
use rustc_target::abi::{Size, VariantIdx};
-use rustc_trait_selection::infer::InferCtxtExt;
+use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _;
use rustc_trait_selection::traits::query::normalize::QueryNormalizeExt;
+use rustc_trait_selection::traits::{Obligation, ObligationCause};
use std::iter;
use crate::{match_def_path, path_res, paths};
+mod type_certainty;
+pub use type_certainty::expr_type_is_certain;
+
/// Checks if the given type implements copy.
pub fn is_copy<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
ty.is_copy_modulo_regions(cx.tcx, cx.param_env)
@@ -90,14 +94,14 @@ pub fn contains_ty_adt_constructor_opaque<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'
return false;
}
- for (predicate, _span) in cx.tcx.explicit_item_bounds(def_id).subst_identity_iter_copied() {
+ for (predicate, _span) in cx.tcx.explicit_item_bounds(def_id).instantiate_identity_iter_copied() {
match predicate.kind().skip_binder() {
// For `impl Trait<U>`, it will register a predicate of `T: Trait<U>`, so we go through
// and check substitutions to find `U`.
ty::ClauseKind::Trait(trait_predicate) => {
if trait_predicate
.trait_ref
- .substs
+ .args
.types()
.skip(1) // Skip the implicit `Self` generic parameter
.any(|ty| contains_ty_adt_constructor_opaque_inner(cx, ty, needle, seen))
@@ -206,15 +210,9 @@ pub fn implements_trait<'tcx>(
cx: &LateContext<'tcx>,
ty: Ty<'tcx>,
trait_id: DefId,
- ty_params: &[GenericArg<'tcx>],
+ args: &[GenericArg<'tcx>],
) -> bool {
- implements_trait_with_env(
- cx.tcx,
- cx.param_env,
- ty,
- trait_id,
- ty_params.iter().map(|&arg| Some(arg)),
- )
+ implements_trait_with_env_from_iter(cx.tcx, cx.param_env, ty, trait_id, args.iter().map(|&x| Some(x)))
}
/// Same as `implements_trait` but allows using a `ParamEnv` different from the lint context.
@@ -223,7 +221,18 @@ pub fn implements_trait_with_env<'tcx>(
param_env: ParamEnv<'tcx>,
ty: Ty<'tcx>,
trait_id: DefId,
- ty_params: impl IntoIterator<Item = Option<GenericArg<'tcx>>>,
+ args: &[GenericArg<'tcx>],
+) -> bool {
+ implements_trait_with_env_from_iter(tcx, param_env, ty, trait_id, args.iter().map(|&x| Some(x)))
+}
+
+/// Same as `implements_trait_with_env` but takes the arguments as an iterator.
+pub fn implements_trait_with_env_from_iter<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ param_env: ParamEnv<'tcx>,
+ ty: Ty<'tcx>,
+ trait_id: DefId,
+ args: impl IntoIterator<Item = impl Into<Option<GenericArg<'tcx>>>>,
) -> bool {
// Clippy shouldn't have infer types
assert!(!ty.has_infer());
@@ -232,19 +241,37 @@ pub fn implements_trait_with_env<'tcx>(
if ty.has_escaping_bound_vars() {
return false;
}
+
let infcx = tcx.infer_ctxt().build();
- let orig = TypeVariableOrigin {
- kind: TypeVariableOriginKind::MiscVariable,
- span: DUMMY_SP,
- };
- let ty_params = tcx.mk_substs_from_iter(
- ty_params
+ let trait_ref = TraitRef::new(
+ tcx,
+ trait_id,
+ Some(GenericArg::from(ty))
.into_iter()
- .map(|arg| arg.unwrap_or_else(|| infcx.next_ty_var(orig).into())),
+ .chain(args.into_iter().map(|arg| {
+ arg.into().unwrap_or_else(|| {
+ let orig = TypeVariableOrigin {
+ kind: TypeVariableOriginKind::MiscVariable,
+ span: DUMMY_SP,
+ };
+ infcx.next_ty_var(orig).into()
+ })
+ })),
);
+
+ debug_assert_eq!(tcx.def_kind(trait_id), DefKind::Trait);
+ #[cfg(debug_assertions)]
+ assert_generic_args_match(tcx, trait_id, trait_ref.args);
+
+ let obligation = Obligation {
+ cause: ObligationCause::dummy(),
+ param_env,
+ recursion_depth: 0,
+ predicate: ty::Binder::dummy(trait_ref).to_predicate(tcx),
+ };
infcx
- .type_implements_trait(trait_id, [ty.into()].into_iter().chain(ty_params), param_env)
- .must_apply_modulo_regions()
+ .evaluate_obligation(&obligation)
+ .is_ok_and(EvaluationResult::must_apply_modulo_regions)
}
/// Checks whether this type implements `Drop`.
@@ -265,7 +292,7 @@ pub fn is_must_use_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
// because we don't want to lint functions returning empty arrays
is_must_use_ty(cx, *ty)
},
- ty::Tuple(substs) => substs.iter().any(|ty| is_must_use_ty(cx, ty)),
+ ty::Tuple(args) => args.iter().any(|ty| is_must_use_ty(cx, ty)),
ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }) => {
for (predicate, _) in cx.tcx.explicit_item_bounds(def_id).skip_binder() {
if let ty::ClauseKind::Trait(trait_predicate) = predicate.kind().skip_binder() {
@@ -314,11 +341,11 @@ fn is_normalizable_helper<'tcx>(
let cause = rustc_middle::traits::ObligationCause::dummy();
let result = if infcx.at(&cause, param_env).query_normalize(ty).is_ok() {
match ty.kind() {
- ty::Adt(def, substs) => def.variants().iter().all(|variant| {
+ ty::Adt(def, args) => def.variants().iter().all(|variant| {
variant
.fields
.iter()
- .all(|field| is_normalizable_helper(cx, param_env, field.ty(cx.tcx, substs), cache))
+ .all(|field| is_normalizable_helper(cx, param_env, field.ty(cx.tcx, args), cache))
}),
_ => ty.walk().all(|generic_arg| match generic_arg.unpack() {
GenericArgKind::Type(inner_ty) if inner_ty != ty => {
@@ -392,6 +419,11 @@ pub fn is_type_lang_item(cx: &LateContext<'_>, ty: Ty<'_>, lang_item: hir::LangI
}
}
+/// Gets the diagnostic name of the type, if it has one
+pub fn type_diagnostic_name(cx: &LateContext<'_>, ty: Ty<'_>) -> Option<Symbol> {
+ ty.ty_adt_def().and_then(|adt| cx.tcx.get_diagnostic_name(adt.did()))
+}
+
/// Return `true` if the passed `typ` is `isize` or `usize`.
pub fn is_isize_or_usize(typ: Ty<'_>) -> bool {
matches!(typ.kind(), ty::Int(IntTy::Isize) | ty::Uint(UintTy::Usize))
@@ -517,14 +549,14 @@ pub fn walk_ptrs_ty_depth(ty: Ty<'_>) -> (Ty<'_>, usize) {
/// otherwise returns `false`
pub fn same_type_and_consts<'tcx>(a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
match (&a.kind(), &b.kind()) {
- (&ty::Adt(did_a, substs_a), &ty::Adt(did_b, substs_b)) => {
+ (&ty::Adt(did_a, args_a), &ty::Adt(did_b, args_b)) => {
if did_a != did_b {
return false;
}
- substs_a
+ args_a
.iter()
- .zip(substs_b.iter())
+ .zip(args_b.iter())
.all(|(arg_a, arg_b)| match (arg_a.unpack(), arg_b.unpack()) {
(GenericArgKind::Const(inner_a), GenericArgKind::Const(inner_b)) => inner_a == inner_b,
(GenericArgKind::Type(type_a), GenericArgKind::Type(type_b)) => {
@@ -643,7 +675,7 @@ impl<'tcx> ExprFnSig<'tcx> {
/// If the expression is function like, get the signature for it.
pub fn expr_sig<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> Option<ExprFnSig<'tcx>> {
if let Res::Def(DefKind::Fn | DefKind::Ctor(_, CtorKind::Fn) | DefKind::AssocFn, id) = path_res(cx, expr) {
- Some(ExprFnSig::Sig(cx.tcx.fn_sig(id).subst_identity(), Some(id)))
+ Some(ExprFnSig::Sig(cx.tcx.fn_sig(id).instantiate_identity(), Some(id)))
} else {
ty_sig(cx, cx.typeck_results().expr_ty_adjusted(expr).peel_refs())
}
@@ -661,11 +693,11 @@ pub fn ty_sig<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<ExprFnSig<'t
.and_then(|id| cx.tcx.hir().fn_decl_by_hir_id(cx.tcx.hir().local_def_id_to_hir_id(id)));
Some(ExprFnSig::Closure(decl, subs.as_closure().sig()))
},
- ty::FnDef(id, subs) => Some(ExprFnSig::Sig(cx.tcx.fn_sig(id).subst(cx.tcx, subs), Some(id))),
- ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => sig_from_bounds(
+ ty::FnDef(id, subs) => Some(ExprFnSig::Sig(cx.tcx.fn_sig(id).instantiate(cx.tcx, subs), Some(id))),
+ ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => sig_from_bounds(
cx,
ty,
- cx.tcx.item_bounds(def_id).subst_iter(cx.tcx, substs),
+ cx.tcx.item_bounds(def_id).iter_instantiated(cx.tcx, args),
cx.tcx.opt_parent(def_id),
),
ty::FnPtr(sig) => Some(ExprFnSig::Sig(sig, None)),
@@ -681,7 +713,7 @@ pub fn ty_sig<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<ExprFnSig<'t
.projection_bounds()
.find(|p| lang_items.fn_once_output().map_or(false, |id| id == p.item_def_id()))
.map(|p| p.map_bound(|p| p.term.ty().unwrap()));
- Some(ExprFnSig::Trait(bound.map_bound(|b| b.substs.type_at(0)), output, None))
+ Some(ExprFnSig::Trait(bound.map_bound(|b| b.args.type_at(0)), output, None))
},
_ => None,
}
@@ -713,7 +745,7 @@ fn sig_from_bounds<'tcx>(
|| lang_items.fn_once_trait() == Some(p.def_id()))
&& p.self_ty() == ty =>
{
- let i = pred.kind().rebind(p.trait_ref.substs.type_at(1));
+ let i = pred.kind().rebind(p.trait_ref.args.type_at(1));
if inputs.map_or(false, |inputs| i != inputs) {
// Multiple different fn trait impls. Is this even allowed?
return None;
@@ -744,7 +776,7 @@ fn sig_for_projection<'tcx>(cx: &LateContext<'tcx>, ty: AliasTy<'tcx>) -> Option
for (pred, _) in cx
.tcx
.explicit_item_bounds(ty.def_id)
- .subst_iter_copied(cx.tcx, ty.substs)
+ .iter_instantiated_copied(cx.tcx, ty.args)
{
match pred.kind().skip_binder() {
ty::ClauseKind::Trait(p)
@@ -752,7 +784,7 @@ fn sig_for_projection<'tcx>(cx: &LateContext<'tcx>, ty: AliasTy<'tcx>) -> Option
|| lang_items.fn_mut_trait() == Some(p.def_id())
|| lang_items.fn_once_trait() == Some(p.def_id())) =>
{
- let i = pred.kind().rebind(p.trait_ref.substs.type_at(1));
+ let i = pred.kind().rebind(p.trait_ref.args.type_at(1));
if inputs.map_or(false, |inputs| inputs != i) {
// Multiple different fn trait impls. Is this even allowed?
@@ -793,7 +825,7 @@ impl core::ops::Add<u32> for EnumValue {
#[expect(clippy::cast_possible_truncation, clippy::cast_possible_wrap)]
pub fn read_explicit_enum_value(tcx: TyCtxt<'_>, id: DefId) -> Option<EnumValue> {
if let Ok(ConstValue::Scalar(Scalar::Int(value))) = tcx.const_eval_poly(id) {
- match tcx.type_of(id).subst_identity().kind() {
+ match tcx.type_of(id).instantiate_identity().kind() {
ty::Int(_) => Some(EnumValue::Signed(match value.size().bytes() {
1 => i128::from(value.assert_bits(Size::from_bytes(1)) as u8 as i8),
2 => i128::from(value.assert_bits(Size::from_bytes(2)) as u16 as i16),
@@ -927,7 +959,7 @@ pub fn adt_and_variant_of_res<'tcx>(cx: &LateContext<'tcx>, res: Res) -> Option<
Some((adt, adt.variant_with_id(var_id)))
},
Res::SelfCtor(id) => {
- let adt = cx.tcx.type_of(id).subst_identity().ty_adt_def().unwrap();
+ let adt = cx.tcx.type_of(id).instantiate_identity().ty_adt_def().unwrap();
Some((adt, adt.non_enum_variant()))
},
_ => None,
@@ -940,8 +972,7 @@ pub fn ty_is_fn_once_param<'tcx>(tcx: TyCtxt<'_>, ty: Ty<'tcx>, predicates: &'tc
return false;
};
let lang = tcx.lang_items();
- let (Some(fn_once_id), Some(fn_mut_id), Some(fn_id))
- = (lang.fn_once_trait(), lang.fn_mut_trait(), lang.fn_trait())
+ let (Some(fn_once_id), Some(fn_mut_id), Some(fn_id)) = (lang.fn_once_trait(), lang.fn_mut_trait(), lang.fn_trait())
else {
return false;
};
@@ -1014,91 +1045,104 @@ pub fn approx_ty_size<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> u64 {
}
}
+/// Asserts that the given arguments match the generic parameters of the given item.
+#[allow(dead_code)]
+fn assert_generic_args_match<'tcx>(tcx: TyCtxt<'tcx>, did: DefId, args: &[GenericArg<'tcx>]) {
+ let g = tcx.generics_of(did);
+ let parent = g.parent.map(|did| tcx.generics_of(did));
+ let count = g.parent_count + g.params.len();
+ let params = parent
+ .map_or([].as_slice(), |p| p.params.as_slice())
+ .iter()
+ .chain(&g.params)
+ .map(|x| &x.kind);
+
+ assert!(
+ count == args.len(),
+ "wrong number of arguments for `{did:?}`: expected `{count}`, found {}\n\
+ note: the expected arguments are: `[{}]`\n\
+ the given arguments are: `{args:#?}`",
+ args.len(),
+ params.clone().map(GenericParamDefKind::descr).format(", "),
+ );
+
+ if let Some((idx, (param, arg))) =
+ params
+ .clone()
+ .zip(args.iter().map(|&x| x.unpack()))
+ .enumerate()
+ .find(|(_, (param, arg))| match (param, arg) {
+ (GenericParamDefKind::Lifetime, GenericArgKind::Lifetime(_))
+ | (GenericParamDefKind::Type { .. }, GenericArgKind::Type(_))
+ | (GenericParamDefKind::Const { .. }, GenericArgKind::Const(_)) => false,
+ (
+ GenericParamDefKind::Lifetime
+ | GenericParamDefKind::Type { .. }
+ | GenericParamDefKind::Const { .. },
+ _,
+ ) => true,
+ })
+ {
+ panic!(
+ "incorrect argument for `{did:?}` at index `{idx}`: expected a {}, found `{arg:?}`\n\
+ note: the expected arguments are `[{}]`\n\
+ the given arguments are `{args:#?}`",
+ param.descr(),
+ params.clone().map(GenericParamDefKind::descr).format(", "),
+ );
+ }
+}
+
+/// Returns whether `ty` is never-like; i.e., `!` (never) or an enum with zero variants.
+pub fn is_never_like(ty: Ty<'_>) -> bool {
+ ty.is_never() || (ty.is_enum() && ty.ty_adt_def().is_some_and(|def| def.variants().is_empty()))
+}
+
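`is_never_like` treats the never type `!` and zero-variant enums alike. Since `!` is not yet usable as a standalone type on stable Rust, this small sketch uses an empty enum to show the "value can never exist" case the helper captures (names invented):

```rust
enum Void {}

fn unreachable_value() -> Option<Void> {
    // There is no way to construct a `Void`, so this can only ever be `None`;
    // a type like `Void` is what `is_never_like` reports as never-like.
    None
}

fn main() {
    assert!(unreachable_value().is_none());
}
```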
/// Makes the projection type for the named associated type in the given impl or trait impl.
///
/// This function is for associated types which are "known" to exist, and as such, will only return
/// `None` when debug assertions are disabled in order to prevent ICE's. With debug assertions
/// enabled this will check that the named associated type exists, the correct number of
-/// substitutions are given, and that the correct kinds of substitutions are given (lifetime,
+/// arguments are given, and that the correct kinds of arguments are given (lifetime,
/// constant or type). This will not check if type normalization would succeed.
pub fn make_projection<'tcx>(
tcx: TyCtxt<'tcx>,
container_id: DefId,
assoc_ty: Symbol,
- substs: impl IntoIterator<Item = impl Into<GenericArg<'tcx>>>,
+ args: impl IntoIterator<Item = impl Into<GenericArg<'tcx>>>,
) -> Option<AliasTy<'tcx>> {
fn helper<'tcx>(
tcx: TyCtxt<'tcx>,
container_id: DefId,
assoc_ty: Symbol,
- substs: SubstsRef<'tcx>,
+ args: GenericArgsRef<'tcx>,
) -> Option<AliasTy<'tcx>> {
- let Some(assoc_item) = tcx
- .associated_items(container_id)
- .find_by_name_and_kind(tcx, Ident::with_dummy_span(assoc_ty), AssocKind::Type, container_id)
- else {
+ let Some(assoc_item) = tcx.associated_items(container_id).find_by_name_and_kind(
+ tcx,
+ Ident::with_dummy_span(assoc_ty),
+ AssocKind::Type,
+ container_id,
+ ) else {
debug_assert!(false, "type `{assoc_ty}` not found in `{container_id:?}`");
return None;
};
#[cfg(debug_assertions)]
- {
- let generics = tcx.generics_of(assoc_item.def_id);
- let generic_count = generics.parent_count + generics.params.len();
- let params = generics
- .parent
- .map_or([].as_slice(), |id| &*tcx.generics_of(id).params)
- .iter()
- .chain(&generics.params)
- .map(|x| &x.kind);
-
- debug_assert!(
- generic_count == substs.len(),
- "wrong number of substs for `{:?}`: found `{}` expected `{generic_count}`.\n\
- note: the expected parameters are: {:#?}\n\
- the given arguments are: `{substs:#?}`",
- assoc_item.def_id,
- substs.len(),
- params.map(ty::GenericParamDefKind::descr).collect::<Vec<_>>(),
- );
-
- if let Some((idx, (param, arg))) = params
- .clone()
- .zip(substs.iter().map(GenericArg::unpack))
- .enumerate()
- .find(|(_, (param, arg))| {
- !matches!(
- (param, arg),
- (ty::GenericParamDefKind::Lifetime, GenericArgKind::Lifetime(_))
- | (ty::GenericParamDefKind::Type { .. }, GenericArgKind::Type(_))
- | (ty::GenericParamDefKind::Const { .. }, GenericArgKind::Const(_))
- )
- })
- {
- debug_assert!(
- false,
- "mismatched subst type at index {idx}: expected a {}, found `{arg:?}`\n\
- note: the expected parameters are {:#?}\n\
- the given arguments are {substs:#?}",
- param.descr(),
- params.map(ty::GenericParamDefKind::descr).collect::<Vec<_>>()
- );
- }
- }
+ assert_generic_args_match(tcx, assoc_item.def_id, args);
- Some(tcx.mk_alias_ty(assoc_item.def_id, substs))
+ Some(tcx.mk_alias_ty(assoc_item.def_id, args))
}
helper(
tcx,
container_id,
assoc_ty,
- tcx.mk_substs_from_iter(substs.into_iter().map(Into::into)),
+ tcx.mk_args_from_iter(args.into_iter().map(Into::into)),
)
}
/// Normalizes the named associated type in the given impl or trait impl.
///
/// This function is for associated types which are "known" to be valid with the given
-/// substitutions, and as such, will only return `None` when debug assertions are disabled in order
+/// arguments, and as such, will only return `None` when debug assertions are disabled in order
/// to prevent ICE's. With debug assertions enabled this will check that type normalization
/// succeeds as well as everything checked by `make_projection`.
pub fn make_normalized_projection<'tcx>(
@@ -1106,25 +1150,20 @@ pub fn make_normalized_projection<'tcx>(
param_env: ParamEnv<'tcx>,
container_id: DefId,
assoc_ty: Symbol,
- substs: impl IntoIterator<Item = impl Into<GenericArg<'tcx>>>,
+ args: impl IntoIterator<Item = impl Into<GenericArg<'tcx>>>,
) -> Option<Ty<'tcx>> {
fn helper<'tcx>(tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, ty: AliasTy<'tcx>) -> Option<Ty<'tcx>> {
#[cfg(debug_assertions)]
- if let Some((i, subst)) = ty
- .substs
- .iter()
- .enumerate()
- .find(|(_, subst)| subst.has_late_bound_regions())
- {
+ if let Some((i, arg)) = ty.args.iter().enumerate().find(|(_, arg)| arg.has_late_bound_regions()) {
debug_assert!(
false,
- "substs contain late-bound region at index `{i}` which can't be normalized.\n\
+ "args contain late-bound region at index `{i}` which can't be normalized.\n\
use `TyCtxt::erase_late_bound_regions`\n\
- note: subst is `{subst:#?}`",
+ note: arg is `{arg:#?}`",
);
return None;
}
- match tcx.try_normalize_erasing_regions(param_env, Ty::new_projection(tcx,ty.def_id, ty.substs)) {
+ match tcx.try_normalize_erasing_regions(param_env, Ty::new_projection(tcx, ty.def_id, ty.args)) {
Ok(ty) => Some(ty),
Err(e) => {
debug_assert!(false, "failed to normalize type `{ty}`: {e:#?}");
@@ -1132,7 +1171,7 @@ pub fn make_normalized_projection<'tcx>(
},
}
}
- helper(tcx, param_env, make_projection(tcx, container_id, assoc_ty, substs)?)
+ helper(tcx, param_env, make_projection(tcx, container_id, assoc_ty, args)?)
}
/// Check if given type has inner mutability such as [`std::cell::Cell`] or [`std::cell::RefCell`]
@@ -1147,7 +1186,7 @@ pub fn is_interior_mut_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
&& is_interior_mut_ty(cx, inner_ty)
},
ty::Tuple(fields) => fields.iter().any(|ty| is_interior_mut_ty(cx, ty)),
- ty::Adt(def, substs) => {
+ ty::Adt(def, args) => {
// Special case for collections in `std` who's impl of `Hash` or `Ord` delegates to
// that of their type parameters. Note: we don't include `HashSet` and `HashMap`
// because they have no impl for `Hash` or `Ord`.
@@ -1168,7 +1207,7 @@ pub fn is_interior_mut_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
let is_box = Some(def_id) == cx.tcx.lang_items().owned_box();
if is_std_collection || is_box {
// The type is mutable if any of its type parameters are
- substs.types().any(|ty| is_interior_mut_ty(cx, ty))
+ args.types().any(|ty| is_interior_mut_ty(cx, ty))
} else {
!ty.has_escaping_bound_vars()
&& cx.tcx.layout_of(cx.param_env.and(ty)).is_ok()
@@ -1184,21 +1223,16 @@ pub fn make_normalized_projection_with_regions<'tcx>(
param_env: ParamEnv<'tcx>,
container_id: DefId,
assoc_ty: Symbol,
- substs: impl IntoIterator<Item = impl Into<GenericArg<'tcx>>>,
+ args: impl IntoIterator<Item = impl Into<GenericArg<'tcx>>>,
) -> Option<Ty<'tcx>> {
fn helper<'tcx>(tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, ty: AliasTy<'tcx>) -> Option<Ty<'tcx>> {
#[cfg(debug_assertions)]
- if let Some((i, subst)) = ty
- .substs
- .iter()
- .enumerate()
- .find(|(_, subst)| subst.has_late_bound_regions())
- {
+ if let Some((i, arg)) = ty.args.iter().enumerate().find(|(_, arg)| arg.has_late_bound_regions()) {
debug_assert!(
false,
- "substs contain late-bound region at index `{i}` which can't be normalized.\n\
+ "args contain late-bound region at index `{i}` which can't be normalized.\n\
use `TyCtxt::erase_late_bound_regions`\n\
- note: subst is `{subst:#?}`",
+ note: arg is `{arg:#?}`",
);
return None;
}
@@ -1207,7 +1241,7 @@ pub fn make_normalized_projection_with_regions<'tcx>(
.infer_ctxt()
.build()
.at(&cause, param_env)
- .query_normalize(Ty::new_projection(tcx,ty.def_id, ty.substs))
+ .query_normalize(Ty::new_projection(tcx, ty.def_id, ty.args))
{
Ok(ty) => Some(ty.value),
Err(e) => {
@@ -1216,7 +1250,7 @@ pub fn make_normalized_projection_with_regions<'tcx>(
},
}
}
- helper(tcx, param_env, make_projection(tcx, container_id, assoc_ty, substs)?)
+ helper(tcx, param_env, make_projection(tcx, container_id, assoc_ty, args)?)
}
pub fn normalize_with_regions<'tcx>(tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
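
As an aside on the special case noted above: collections such as `Vec` and `Box` count as interior-mutable exactly when their type parameters are, because (per the comment) their `Hash`/`Ord` impls delegate to the element type, and an interior-mutable element can be changed through a shared reference. A small standalone illustration, independent of the clippy helper itself:

use std::cell::Cell;

fn main() {
    // `Vec`'s `Hash`/`Ord` delegate to the elements, and a `Cell` element can be
    // mutated through `&Vec<_>` alone, so `Vec<Cell<u32>>` is treated as interior mutable.
    let v: Vec<Cell<u32>> = vec![Cell::new(1)];
    let shared: &Vec<Cell<u32>> = &v;
    shared[0].set(42); // no `&mut` anywhere
    assert_eq!(v[0].get(), 42);

    // `Vec<u32>` has no such escape hatch and is not interior mutable.
    let _plain: Vec<u32> = vec![1];
}
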
diff --git a/src/tools/clippy/clippy_utils/src/ty/type_certainty/certainty.rs b/src/tools/clippy/clippy_utils/src/ty/type_certainty/certainty.rs
new file mode 100644
index 000000000..0e69ffa22
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/ty/type_certainty/certainty.rs
@@ -0,0 +1,122 @@
+use rustc_hir::def_id::DefId;
+use std::fmt::Debug;
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum Certainty {
+ /// Determining the type requires contextual information.
+ Uncertain,
+
+ /// The type can be determined purely from subexpressions. If the argument is `Some(..)`, the
+ /// specific `DefId` is known. Such arguments are needed to handle path segments whose `res` is
+ /// `Res::Err`.
+ Certain(Option<DefId>),
+
+ /// The heuristic believes that more than one `DefId` applies to a type---this is a bug.
+ Contradiction,
+}
+
+pub trait Meet {
+ fn meet(self, other: Self) -> Self;
+}
+
+pub trait TryJoin: Sized {
+ fn try_join(self, other: Self) -> Option<Self>;
+}
+
+impl Meet for Option<DefId> {
+ fn meet(self, other: Self) -> Self {
+ match (self, other) {
+ (None, _) | (_, None) => None,
+ (Some(lhs), Some(rhs)) => (lhs == rhs).then_some(lhs),
+ }
+ }
+}
+
+impl TryJoin for Option<DefId> {
+ fn try_join(self, other: Self) -> Option<Self> {
+ match (self, other) {
+ (Some(lhs), Some(rhs)) => (lhs == rhs).then_some(Some(lhs)),
+ (Some(def_id), _) | (_, Some(def_id)) => Some(Some(def_id)),
+ (None, None) => Some(None),
+ }
+ }
+}
+
+impl Meet for Certainty {
+ fn meet(self, other: Self) -> Self {
+ match (self, other) {
+ (Certainty::Uncertain, _) | (_, Certainty::Uncertain) => Certainty::Uncertain,
+ (Certainty::Certain(lhs), Certainty::Certain(rhs)) => Certainty::Certain(lhs.meet(rhs)),
+ (Certainty::Certain(inner), _) | (_, Certainty::Certain(inner)) => Certainty::Certain(inner),
+ (Certainty::Contradiction, Certainty::Contradiction) => Certainty::Contradiction,
+ }
+ }
+}
+
+impl Certainty {
+ /// Join two `Certainty`s preserving their `DefId`s (if any). Generally speaking, this method
+ /// should be used only when `self` and `other` refer directly to types. Otherwise,
+ /// `join_clearing_def_ids` should be used.
+ pub fn join(self, other: Self) -> Self {
+ match (self, other) {
+ (Certainty::Contradiction, _) | (_, Certainty::Contradiction) => Certainty::Contradiction,
+
+ (Certainty::Certain(lhs), Certainty::Certain(rhs)) => {
+ if let Some(inner) = lhs.try_join(rhs) {
+ Certainty::Certain(inner)
+ } else {
+ debug_assert!(false, "Contradiction with {lhs:?} and {rhs:?}");
+ Certainty::Contradiction
+ }
+ },
+
+ (Certainty::Certain(inner), _) | (_, Certainty::Certain(inner)) => Certainty::Certain(inner),
+
+ (Certainty::Uncertain, Certainty::Uncertain) => Certainty::Uncertain,
+ }
+ }
+
+ /// Join two `Certainty`s after clearing their `DefId`s. This method should be used when `self`
+ /// or `other` do not necessarily refer to types, e.g., when they are aggregations of other
+ /// `Certainty`s.
+ pub fn join_clearing_def_ids(self, other: Self) -> Self {
+ self.clear_def_id().join(other.clear_def_id())
+ }
+
+ pub fn clear_def_id(self) -> Certainty {
+ if matches!(self, Certainty::Certain(_)) {
+ Certainty::Certain(None)
+ } else {
+ self
+ }
+ }
+
+ pub fn with_def_id(self, def_id: DefId) -> Certainty {
+ if matches!(self, Certainty::Certain(_)) {
+ Certainty::Certain(Some(def_id))
+ } else {
+ self
+ }
+ }
+
+ pub fn to_def_id(self) -> Option<DefId> {
+ match self {
+ Certainty::Certain(inner) => inner,
+ _ => None,
+ }
+ }
+
+ pub fn is_certain(self) -> bool {
+ matches!(self, Self::Certain(_))
+ }
+}
+
+/// Think: `iter.all(/* is certain */)`
+pub fn meet(iter: impl Iterator<Item = Certainty>) -> Certainty {
+ iter.fold(Certainty::Certain(None), Certainty::meet)
+}
+
+/// Think: `iter.any(/* is certain */)`
+pub fn join(iter: impl Iterator<Item = Certainty>) -> Certainty {
+ iter.fold(Certainty::Uncertain, Certainty::join)
+}
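
For orientation, here is a minimal, self-contained sketch of the lattice behaviour the comments above describe. A plain `u32` stands in for `DefId` and the `Contradiction` variant is omitted, both purely so the example compiles outside rustc; `meet` behaves like `iter.all(/* is certain */)` and `join` like `iter.any(/* is certain */)`.

// Simplified model of `Certainty`; `u32` is a hypothetical stand-in for `DefId`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Certainty {
    Uncertain,
    Certain(Option<u32>),
}

impl Certainty {
    /// Certain only if both sides are certain; the id survives only if both sides agree on it.
    fn meet(self, other: Self) -> Self {
        match (self, other) {
            (Certainty::Certain(a), Certainty::Certain(b)) => {
                Certainty::Certain(if a == b { a } else { None })
            },
            _ => Certainty::Uncertain,
        }
    }

    /// Certain as soon as either side is certain; keeps whichever id is available.
    fn join(self, other: Self) -> Self {
        match (self, other) {
            (Certainty::Certain(a), Certainty::Certain(b)) => Certainty::Certain(a.or(b)),
            (Certainty::Certain(a), _) | (_, Certainty::Certain(a)) => Certainty::Certain(a),
            _ => Certainty::Uncertain,
        }
    }
}

fn main() {
    let known = Certainty::Certain(Some(7));
    let anonymous = Certainty::Certain(None);
    let unknown = Certainty::Uncertain;

    assert_eq!(known.meet(unknown), Certainty::Uncertain);        // one uncertain side poisons `meet`
    assert_eq!(known.meet(anonymous), Certainty::Certain(None));  // ids that disagree are dropped
    assert_eq!(unknown.join(known), Certainty::Certain(Some(7))); // `join` keeps any certainty found
}
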
diff --git a/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs b/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs
new file mode 100644
index 000000000..06fd95290
--- /dev/null
+++ b/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs
@@ -0,0 +1,320 @@
+//! A heuristic to tell whether an expression's type can be determined purely from its
+//! subexpressions, and the arguments and locals they use. Put another way, `expr_type_is_certain`
+//! tries to tell whether an expression's type can be determined without appeal to the surrounding
+//! context.
+//!
+//! This is, in some sense, a counterpart to `let_unit_value`'s `expr_needs_inferred_result`.
+//! Intuitively, that function determines whether an expression's type is needed for type inference,
+//! whereas `expr_type_is_certain` determines whether type inference is needed for an expression's
+//! type.
+//!
+//! As a heuristic, `expr_type_is_certain` may produce false negatives, but a false positive should
+//! be considered a bug.
+
+use crate::def_path_res;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def_id::DefId;
+use rustc_hir::intravisit::{walk_qpath, walk_ty, Visitor};
+use rustc_hir::{self as hir, Expr, ExprKind, GenericArgs, HirId, Node, PathSegment, QPath, TyKind};
+use rustc_lint::LateContext;
+use rustc_middle::ty::{self, AdtDef, GenericArgKind, Ty};
+use rustc_span::{Span, Symbol};
+
+mod certainty;
+use certainty::{join, meet, Certainty, Meet};
+
+pub fn expr_type_is_certain(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ expr_type_certainty(cx, expr).is_certain()
+}
+
+fn expr_type_certainty(cx: &LateContext<'_>, expr: &Expr<'_>) -> Certainty {
+ let certainty = match &expr.kind {
+ ExprKind::Unary(_, expr)
+ | ExprKind::Field(expr, _)
+ | ExprKind::Index(expr, _, _)
+ | ExprKind::AddrOf(_, _, expr) => expr_type_certainty(cx, expr),
+
+ ExprKind::Array(exprs) => join(exprs.iter().map(|expr| expr_type_certainty(cx, expr))),
+
+ ExprKind::Call(callee, args) => {
+ let lhs = expr_type_certainty(cx, callee);
+ let rhs = if type_is_inferrable_from_arguments(cx, expr) {
+ meet(args.iter().map(|arg| expr_type_certainty(cx, arg)))
+ } else {
+ Certainty::Uncertain
+ };
+ lhs.join_clearing_def_ids(rhs)
+ },
+
+ ExprKind::MethodCall(method, receiver, args, _) => {
+ let mut receiver_type_certainty = expr_type_certainty(cx, receiver);
+ // Even if `receiver_type_certainty` is `Certain(Some(..))`, the `Self` type in the method
+ // identified by `type_dependent_def_id(..)` can differ. This can happen as a result of a `deref`,
+ // for example. So update the `DefId` in `receiver_type_certainty` (if any).
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ && let Some(self_ty_def_id) = adt_def_id(self_ty(cx, method_def_id))
+ {
+ receiver_type_certainty = receiver_type_certainty.with_def_id(self_ty_def_id);
+ };
+ let lhs = path_segment_certainty(cx, receiver_type_certainty, method, false);
+ let rhs = if type_is_inferrable_from_arguments(cx, expr) {
+ meet(
+ std::iter::once(receiver_type_certainty).chain(args.iter().map(|arg| expr_type_certainty(cx, arg))),
+ )
+ } else {
+ Certainty::Uncertain
+ };
+ lhs.join(rhs)
+ },
+
+ ExprKind::Tup(exprs) => meet(exprs.iter().map(|expr| expr_type_certainty(cx, expr))),
+
+ ExprKind::Binary(_, lhs, rhs) => expr_type_certainty(cx, lhs).meet(expr_type_certainty(cx, rhs)),
+
+ ExprKind::Lit(_) => Certainty::Certain(None),
+
+ ExprKind::Cast(_, ty) => type_certainty(cx, ty),
+
+ ExprKind::If(_, if_expr, Some(else_expr)) => {
+ expr_type_certainty(cx, if_expr).join(expr_type_certainty(cx, else_expr))
+ },
+
+ ExprKind::Path(qpath) => qpath_certainty(cx, qpath, false),
+
+ ExprKind::Struct(qpath, _, _) => qpath_certainty(cx, qpath, true),
+
+ _ => Certainty::Uncertain,
+ };
+
+ let expr_ty = cx.typeck_results().expr_ty(expr);
+ if let Some(def_id) = adt_def_id(expr_ty) {
+ certainty.with_def_id(def_id)
+ } else {
+ certainty
+ }
+}
+
+struct CertaintyVisitor<'cx, 'tcx> {
+ cx: &'cx LateContext<'tcx>,
+ certainty: Certainty,
+}
+
+impl<'cx, 'tcx> CertaintyVisitor<'cx, 'tcx> {
+ fn new(cx: &'cx LateContext<'tcx>) -> Self {
+ Self {
+ cx,
+ certainty: Certainty::Certain(None),
+ }
+ }
+}
+
+impl<'cx, 'tcx> Visitor<'cx> for CertaintyVisitor<'cx, 'tcx> {
+ fn visit_qpath(&mut self, qpath: &'cx QPath<'_>, hir_id: HirId, _: Span) {
+ self.certainty = self.certainty.meet(qpath_certainty(self.cx, qpath, true));
+ if self.certainty != Certainty::Uncertain {
+ walk_qpath(self, qpath, hir_id);
+ }
+ }
+
+ fn visit_ty(&mut self, ty: &'cx hir::Ty<'_>) {
+ if matches!(ty.kind, TyKind::Infer) {
+ self.certainty = Certainty::Uncertain;
+ }
+ if self.certainty != Certainty::Uncertain {
+ walk_ty(self, ty);
+ }
+ }
+}
+
+fn type_certainty(cx: &LateContext<'_>, ty: &hir::Ty<'_>) -> Certainty {
+ // Handle `TyKind::Path` specially so that its `DefId` can be preserved.
+ //
+ // Note that `CertaintyVisitor::new` initializes the visitor's internal certainty to
+ // `Certainty::Certain(None)`. Furthermore, if a `TyKind::Path` is encountered while traversing
+ // `ty`, the result of the call to `qpath_certainty` is combined with the visitor's internal
+ // certainty using `Certainty::meet`. Thus, if the `TyKind::Path` were not treated specially here,
+ // the resulting certainty would be `Certainty::Certain(None)`.
+ if let TyKind::Path(qpath) = &ty.kind {
+ return qpath_certainty(cx, qpath, true);
+ }
+
+ let mut visitor = CertaintyVisitor::new(cx);
+ visitor.visit_ty(ty);
+ visitor.certainty
+}
+
+fn generic_args_certainty(cx: &LateContext<'_>, args: &GenericArgs<'_>) -> Certainty {
+ let mut visitor = CertaintyVisitor::new(cx);
+ visitor.visit_generic_args(args);
+ visitor.certainty
+}
+
+/// Tries to tell whether a `QPath` resolves to something certain, e.g., whether all of its path
+/// segments' generic arguments are instantiated.
+///
+/// `qpath` could refer to either a type or a value. The heuristic never needs the `DefId` of a
+/// value. So `DefId`s are retained only when `resolves_to_type` is true.
+fn qpath_certainty(cx: &LateContext<'_>, qpath: &QPath<'_>, resolves_to_type: bool) -> Certainty {
+ let certainty = match qpath {
+ QPath::Resolved(ty, path) => {
+ let len = path.segments.len();
+ path.segments.iter().enumerate().fold(
+ ty.map_or(Certainty::Uncertain, |ty| type_certainty(cx, ty)),
+ |parent_certainty, (i, path_segment)| {
+ path_segment_certainty(cx, parent_certainty, path_segment, i != len - 1 || resolves_to_type)
+ },
+ )
+ },
+
+ QPath::TypeRelative(ty, path_segment) => {
+ path_segment_certainty(cx, type_certainty(cx, ty), path_segment, resolves_to_type)
+ },
+
+ QPath::LangItem(lang_item, _, _) => {
+ cx.tcx
+ .lang_items()
+ .get(*lang_item)
+ .map_or(Certainty::Uncertain, |def_id| {
+ let generics = cx.tcx.generics_of(def_id);
+ if generics.parent_count == 0 && generics.params.is_empty() {
+ Certainty::Certain(if resolves_to_type { Some(def_id) } else { None })
+ } else {
+ Certainty::Uncertain
+ }
+ })
+ },
+ };
+ debug_assert!(resolves_to_type || certainty.to_def_id().is_none());
+ certainty
+}
+
+fn path_segment_certainty(
+ cx: &LateContext<'_>,
+ parent_certainty: Certainty,
+ path_segment: &PathSegment<'_>,
+ resolves_to_type: bool,
+) -> Certainty {
+ let certainty = match update_res(cx, parent_certainty, path_segment).unwrap_or(path_segment.res) {
+ // A definition's type is certain if it refers to something without generics (e.g., a crate or module, or
+ // an unparameterized type), or the generics are instantiated with arguments that are certain.
+ //
+ // If the parent is uncertain, then the current path segment must account for the parent's generic arguments.
+ // Consider the following examples, where the current path segment is `None`:
+ // - `Option::None` // uncertain; parent (i.e., `Option`) is uncertain
+ // - `Option::<Vec<u64>>::None` // certain; parent (i.e., `Option::<..>`) is certain
+ // - `Option::None::<Vec<u64>>` // certain; parent (i.e., `Option`) is uncertain
+ Res::Def(_, def_id) => {
+ // Checking `res_generics_def_id(..)` before calling `generics_of` avoids an ICE.
+ if cx.tcx.res_generics_def_id(path_segment.res).is_some() {
+ let generics = cx.tcx.generics_of(def_id);
+ let lhs = if (parent_certainty.is_certain() || generics.parent_count == 0) && generics.params.is_empty()
+ {
+ Certainty::Certain(None)
+ } else {
+ Certainty::Uncertain
+ };
+ let rhs = path_segment
+ .args
+ .map_or(Certainty::Uncertain, |args| generic_args_certainty(cx, args));
+ // See the comment preceding `qpath_certainty`. `def_id` could refer to a type or a value.
+ let certainty = lhs.join_clearing_def_ids(rhs);
+ if resolves_to_type {
+ if let DefKind::TyAlias { .. } = cx.tcx.def_kind(def_id) {
+ adt_def_id(cx.tcx.type_of(def_id).instantiate_identity())
+ .map_or(certainty, |def_id| certainty.with_def_id(def_id))
+ } else {
+ certainty.with_def_id(def_id)
+ }
+ } else {
+ certainty
+ }
+ } else {
+ Certainty::Certain(None)
+ }
+ },
+
+ Res::PrimTy(_) | Res::SelfTyParam { .. } | Res::SelfTyAlias { .. } | Res::SelfCtor(_) => {
+ Certainty::Certain(None)
+ },
+
+ // `get_parent` because `hir_id` refers to a `Pat`, and we're interested in the node containing the `Pat`.
+ Res::Local(hir_id) => match cx.tcx.hir().get_parent(hir_id) {
+ // An argument's type is always certain.
+ Node::Param(..) => Certainty::Certain(None),
+ // A local's type is certain if its type annotation is certain or it has an initializer whose
+ // type is certain.
+ Node::Local(local) => {
+ let lhs = local.ty.map_or(Certainty::Uncertain, |ty| type_certainty(cx, ty));
+ let rhs = local
+ .init
+ .map_or(Certainty::Uncertain, |init| expr_type_certainty(cx, init));
+ let certainty = lhs.join(rhs);
+ if resolves_to_type {
+ certainty
+ } else {
+ certainty.clear_def_id()
+ }
+ },
+ _ => Certainty::Uncertain,
+ },
+
+ _ => Certainty::Uncertain,
+ };
+ debug_assert!(resolves_to_type || certainty.to_def_id().is_none());
+ certainty
+}
+
+/// For at least some `QPath::TypeRelative`, the path segment's `res` can be `Res::Err`.
+/// `update_res` tries to fix the resolution when `parent_certainty` is `Certain(Some(..))`.
+fn update_res(cx: &LateContext<'_>, parent_certainty: Certainty, path_segment: &PathSegment<'_>) -> Option<Res> {
+ if path_segment.res == Res::Err && let Some(def_id) = parent_certainty.to_def_id() {
+ let mut def_path = cx.get_def_path(def_id);
+ def_path.push(path_segment.ident.name);
+ let reses = def_path_res(cx, &def_path.iter().map(Symbol::as_str).collect::<Vec<_>>());
+ if let [res] = reses.as_slice() { Some(*res) } else { None }
+ } else {
+ None
+ }
+}
+
+#[allow(clippy::cast_possible_truncation)]
+fn type_is_inferrable_from_arguments(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+ let Some(callee_def_id) = (match expr.kind {
+ ExprKind::Call(callee, _) => {
+ let callee_ty = cx.typeck_results().expr_ty(callee);
+ if let ty::FnDef(callee_def_id, _) = callee_ty.kind() {
+ Some(*callee_def_id)
+ } else {
+ None
+ }
+ },
+ ExprKind::MethodCall(_, _, _, _) => cx.typeck_results().type_dependent_def_id(expr.hir_id),
+ _ => None,
+ }) else {
+ return false;
+ };
+
+ let generics = cx.tcx.generics_of(callee_def_id);
+ let fn_sig = cx.tcx.fn_sig(callee_def_id).skip_binder();
+
+    // Check that all type parameters appear in the function's input types.
+ (0..(generics.parent_count + generics.params.len()) as u32).all(|index| {
+ fn_sig
+ .inputs()
+ .iter()
+ .any(|input_ty| contains_param(*input_ty.skip_binder(), index))
+ })
+}
+
+fn self_ty<'tcx>(cx: &LateContext<'tcx>, method_def_id: DefId) -> Ty<'tcx> {
+ cx.tcx.fn_sig(method_def_id).skip_binder().inputs().skip_binder()[0]
+}
+
+fn adt_def_id(ty: Ty<'_>) -> Option<DefId> {
+ ty.peel_refs().ty_adt_def().map(AdtDef::did)
+}
+
+fn contains_param(ty: Ty<'_>, index: u32) -> bool {
+ ty.walk()
+ .any(|arg| matches!(arg.unpack(), GenericArgKind::Type(ty) if ty.is_param(index)))
+}
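
To make the `Option::None` examples in the comments above concrete, the snippet below restates them as ordinary Rust. Nothing here calls the heuristic itself; the comments only record what `expr_type_is_certain` is expected to report for each form.

fn main() {
    // Uncertain: on its own, `Option::None` needs the surrounding context
    // (here, the annotation on the binding) to pin down its type.
    let _a: Option<Vec<u64>> = Option::None;

    // Certain: the parent segment `Option::<Vec<u64>>` is fully instantiated.
    let _b = Option::<Vec<u64>>::None;

    // Certain: the current segment carries the generic arguments itself.
    let _c = Option::None::<Vec<u64>>;

    // Certain: a literal's type never depends on the surrounding context.
    let _d = 1_u64;
}
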
diff --git a/src/tools/clippy/clippy_utils/src/usage.rs b/src/tools/clippy/clippy_utils/src/usage.rs
index 985508521..39ef76348 100644
--- a/src/tools/clippy/clippy_utils/src/usage.rs
+++ b/src/tools/clippy/clippy_utils/src/usage.rs
@@ -1,10 +1,9 @@
-use crate as utils;
-use crate::visitors::{for_each_expr, for_each_expr_with_closures, Descend};
+use crate::visitors::{for_each_expr, for_each_expr_with_closures, Descend, Visitable};
+use crate::{self as utils, get_enclosing_loop_or_multi_call_closure};
use core::ops::ControlFlow;
-use rustc_hir as hir;
+use hir::def::Res;
use rustc_hir::intravisit::{self, Visitor};
-use rustc_hir::HirIdSet;
-use rustc_hir::{Expr, ExprKind, HirId, Node};
+use rustc_hir::{self as hir, Expr, ExprKind, HirId, HirIdSet};
use rustc_hir_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::LateContext;
@@ -129,7 +128,7 @@ impl<'a, 'tcx> intravisit::Visitor<'tcx> for BindingUsageFinder<'a, 'tcx> {
}
fn visit_path(&mut self, path: &hir::Path<'tcx>, _: hir::HirId) {
- if let hir::def::Res::Local(id) = path.res {
+ if let Res::Local(id) = path.res {
if self.binding_ids.contains(&id) {
self.usage_found = true;
}
@@ -155,8 +154,21 @@ pub fn contains_return_break_continue_macro(expression: &Expr<'_>) -> bool {
.is_some()
}
+pub fn local_used_in<'tcx>(cx: &LateContext<'tcx>, local_id: HirId, v: impl Visitable<'tcx>) -> bool {
+ for_each_expr_with_closures(cx, v, |e| {
+ if utils::path_to_local_id(e, local_id) {
+ ControlFlow::Break(())
+ } else {
+ ControlFlow::Continue(())
+ }
+ })
+ .is_some()
+}
+
pub fn local_used_after_expr(cx: &LateContext<'_>, local_id: HirId, after: &Expr<'_>) -> bool {
- let Some(block) = utils::get_enclosing_block(cx, local_id) else { return false };
+ let Some(block) = utils::get_enclosing_block(cx, local_id) else {
+ return false;
+ };
// for _ in 1..3 {
// local
@@ -165,32 +177,21 @@ pub fn local_used_after_expr(cx: &LateContext<'_>, local_id: HirId, after: &Expr
// let closure = || local;
// closure();
// closure();
- let in_loop_or_closure = cx
- .tcx
- .hir()
- .parent_iter(after.hir_id)
- .take_while(|&(id, _)| id != block.hir_id)
- .any(|(_, node)| {
- matches!(
- node,
- Node::Expr(Expr {
- kind: ExprKind::Loop(..) | ExprKind::Closure { .. },
- ..
- })
- )
- });
- if in_loop_or_closure {
- return true;
- }
+ let loop_start = get_enclosing_loop_or_multi_call_closure(cx, after).map(|e| e.hir_id);
let mut past_expr = false;
for_each_expr_with_closures(cx, block, |e| {
- if e.hir_id == after.hir_id {
+ if past_expr {
+ if utils::path_to_local_id(e, local_id) {
+ ControlFlow::Break(())
+ } else {
+ ControlFlow::Continue(Descend::Yes)
+ }
+ } else if e.hir_id == after.hir_id {
past_expr = true;
ControlFlow::Continue(Descend::No)
- } else if past_expr && utils::path_to_local_id(e, local_id) {
- ControlFlow::Break(())
} else {
+ past_expr = Some(e.hir_id) == loop_start;
ControlFlow::Continue(Descend::Yes)
}
})
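
The loop case in the comments of `local_used_after_expr` reads more easily as a standalone program. In this sketch the `println!` call merely stands in for the `after` expression; the point is that a read of `local` that appears textually before it still happens after it once the enclosing loop repeats (the same reasoning applies to a closure that is called more than once).

fn main() {
    let local = 5;
    for i in 1..3 {
        // On the second iteration this read of `local` happens after the call
        // below, even though it appears earlier in the source text.
        let _read = local;
        println!("iteration {i}"); // stand-in for the `after` expression
    }
}
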
diff --git a/src/tools/clippy/clippy_utils/src/visitors.rs b/src/tools/clippy/clippy_utils/src/visitors.rs
index 8dafa723a..3b47a4513 100644
--- a/src/tools/clippy/clippy_utils/src/visitors.rs
+++ b/src/tools/clippy/clippy_utils/src/visitors.rs
@@ -52,6 +52,16 @@ pub trait Visitable<'tcx> {
/// Calls the corresponding `visit_*` function on the visitor.
fn visit<V: Visitor<'tcx>>(self, visitor: &mut V);
}
+impl<'tcx, T> Visitable<'tcx> for &'tcx [T]
+where
+ &'tcx T: Visitable<'tcx>,
+{
+ fn visit<V: Visitor<'tcx>>(self, visitor: &mut V) {
+ for x in self {
+ x.visit(visitor);
+ }
+ }
+}
macro_rules! visitable_ref {
($t:ident, $f:ident) => {
impl<'tcx> Visitable<'tcx> for &'tcx $t<'tcx> {
@@ -151,7 +161,7 @@ pub fn for_each_expr_with_closures<'tcx, B, C: Continue>(
/// returns `true` if expr contains match expr desugared from try
fn contains_try(expr: &hir::Expr<'_>) -> bool {
for_each_expr(expr, |e| {
- if matches!(e.kind, hir::ExprKind::Match(_, _, hir::MatchSource::TryDesugar)) {
+ if matches!(e.kind, hir::ExprKind::Match(_, _, hir::MatchSource::TryDesugar(_))) {
ControlFlow::Break(())
} else {
ControlFlow::Continue(())
@@ -319,7 +329,7 @@ pub fn is_const_evaluatable<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) ->
&& self.cx.typeck_results().expr_ty(rhs).peel_refs().is_primitive_ty() => {},
ExprKind::Unary(UnOp::Deref, e) if self.cx.typeck_results().expr_ty(e).is_ref() => (),
ExprKind::Unary(_, e) if self.cx.typeck_results().expr_ty(e).peel_refs().is_primitive_ty() => (),
- ExprKind::Index(base, _)
+ ExprKind::Index(base, _, _)
if matches!(
self.cx.typeck_results().expr_ty(base).peel_refs().kind(),
ty::Slice(_) | ty::Array(..)
@@ -619,7 +629,7 @@ pub fn for_each_unconsumed_temporary<'tcx, B>(
helper(typeck, true, arg, f)?;
}
},
- ExprKind::Index(borrowed, consumed)
+ ExprKind::Index(borrowed, consumed, _)
| ExprKind::Assign(borrowed, consumed, _)
| ExprKind::AssignOp(_, borrowed, consumed) => {
helper(typeck, false, borrowed, f)?;
diff --git a/src/tools/clippy/declare_clippy_lint/Cargo.toml b/src/tools/clippy/declare_clippy_lint/Cargo.toml
index 4dc906d00..3633ed31d 100644
--- a/src/tools/clippy/declare_clippy_lint/Cargo.toml
+++ b/src/tools/clippy/declare_clippy_lint/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "declare_clippy_lint"
-version = "0.1.72"
+version = "0.1.73"
edition = "2021"
publish = false
diff --git a/src/tools/clippy/lintcheck/src/config.rs b/src/tools/clippy/lintcheck/src/config.rs
index 3f01e9bb0..e678d4079 100644
--- a/src/tools/clippy/lintcheck/src/config.rs
+++ b/src/tools/clippy/lintcheck/src/config.rs
@@ -1,5 +1,6 @@
use clap::Parser;
-use std::{num::NonZeroUsize, path::PathBuf};
+use std::num::NonZeroUsize;
+use std::path::PathBuf;
#[derive(Clone, Debug, Parser)]
pub(crate) struct LintcheckConfig {
diff --git a/src/tools/clippy/lintcheck/src/main.rs b/src/tools/clippy/lintcheck/src/main.rs
index de56a6f82..3a022b343 100644
--- a/src/tools/clippy/lintcheck/src/main.rs
+++ b/src/tools/clippy/lintcheck/src/main.rs
@@ -15,16 +15,14 @@ use crate::config::LintcheckConfig;
use crate::recursive::LintcheckServer;
use std::collections::{HashMap, HashSet};
-use std::env;
use std::env::consts::EXE_SUFFIX;
use std::fmt::{self, Write as _};
-use std::fs;
use std::io::{self, ErrorKind};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::sync::atomic::{AtomicUsize, Ordering};
-use std::thread;
use std::time::Duration;
+use std::{env, fs, thread};
use cargo_metadata::diagnostic::{Diagnostic, DiagnosticLevel};
use cargo_metadata::Message;
diff --git a/src/tools/clippy/lintcheck/src/recursive.rs b/src/tools/clippy/lintcheck/src/recursive.rs
index 49072e651..994fa3c3b 100644
--- a/src/tools/clippy/lintcheck/src/recursive.rs
+++ b/src/tools/clippy/lintcheck/src/recursive.rs
@@ -3,8 +3,7 @@
//! [`LintcheckServer`] to ask if it should be skipped, and if not sends the stderr of running
//! clippy on the crate to the server
-use crate::ClippyWarning;
-use crate::RecursiveOptions;
+use crate::{ClippyWarning, RecursiveOptions};
use std::collections::HashSet;
use std::io::{BufRead, BufReader, Read, Write};
diff --git a/src/tools/clippy/rust-toolchain b/src/tools/clippy/rust-toolchain
index 4475d914c..8b3f819f0 100644
--- a/src/tools/clippy/rust-toolchain
+++ b/src/tools/clippy/rust-toolchain
@@ -1,3 +1,3 @@
[toolchain]
-channel = "nightly-2023-06-29"
+channel = "nightly-2023-08-10"
components = ["cargo", "llvm-tools", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"]
diff --git a/src/tools/clippy/rustfmt.toml b/src/tools/clippy/rustfmt.toml
index 18b2a3346..4248f42f6 100644
--- a/src/tools/clippy/rustfmt.toml
+++ b/src/tools/clippy/rustfmt.toml
@@ -4,5 +4,6 @@ match_block_trailing_comma = true
wrap_comments = true
edition = "2021"
error_on_line_overflow = true
+imports_granularity = "Module"
version = "Two"
ignore = ["tests/ui/crashes/ice-10912.rs"]
diff --git a/src/tools/clippy/src/driver.rs b/src/tools/clippy/src/driver.rs
index 1eb288b15..1d89477dc 100644
--- a/src/tools/clippy/src/driver.rs
+++ b/src/tools/clippy/src/driver.rs
@@ -130,6 +130,13 @@ impl rustc_driver::Callbacks for ClippyCallbacks {
config.parse_sess_created = Some(Box::new(move |parse_sess| {
track_clippy_args(parse_sess, &clippy_args_var);
track_files(parse_sess);
+
+            // Trigger a rebuild if CLIPPY_CONF_DIR changes. The value must be valid UTF-8, so
+            // changes between directories whose paths are not valid UTF-8 will not trigger rebuilds
+ parse_sess.env_depinfo.get_mut().insert((
+ Symbol::intern("CLIPPY_CONF_DIR"),
+ env::var("CLIPPY_CONF_DIR").ok().map(|dir| Symbol::intern(&dir)),
+ ));
}));
config.register_lints = Some(Box::new(move |sess, lint_store| {
// technically we're ~guaranteed that this is none but might as well call anything that
@@ -185,7 +192,7 @@ You can use tool lints to allow or deny lints from your code, eg.:
);
}
-const BUG_REPORT_URL: &str = "https://github.com/rust-lang/rust-clippy/issues/new";
+const BUG_REPORT_URL: &str = "https://github.com/rust-lang/rust-clippy/issues/new?template=ice.yml";
#[allow(clippy::too_many_lines)]
pub fn main() {
diff --git a/src/tools/clippy/src/main.rs b/src/tools/clippy/src/main.rs
index cdc85cb33..26b655076 100644
--- a/src/tools/clippy/src/main.rs
+++ b/src/tools/clippy/src/main.rs
@@ -132,8 +132,7 @@ impl ClippyCmd {
let clippy_args: String = self
.clippy_args
.iter()
- .map(|arg| format!("{arg}__CLIPPY_HACKERY__"))
- .collect();
+ .fold(String::new(), |s, arg| s + arg + "__CLIPPY_HACKERY__");
// Currently, `CLIPPY_TERMINAL_WIDTH` is used only to format "unknown field" error messages.
let terminal_width = termize::dimensions().map_or(0, |(w, _)| w);
diff --git a/src/tools/clippy/tests/compile-test.rs b/src/tools/clippy/tests/compile-test.rs
index 0fd37c640..e46f8bf6f 100644
--- a/src/tools/clippy/tests/compile-test.rs
+++ b/src/tools/clippy/tests/compile-test.rs
@@ -3,17 +3,108 @@
#![feature(is_sorted)]
#![cfg_attr(feature = "deny-warnings", deny(warnings))]
#![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(unused_extern_crates)]
-use compiletest::{status_emitter, CommandBuilder};
+use compiletest::{status_emitter, CommandBuilder, OutputConflictHandling};
use ui_test as compiletest;
use ui_test::Mode as TestMode;
+use std::collections::BTreeMap;
use std::env::{self, remove_var, set_var, var_os};
use std::ffi::{OsStr, OsString};
use std::fs;
use std::path::{Path, PathBuf};
+use std::sync::LazyLock;
use test_utils::IS_RUSTC_TEST_SUITE;
+// Test dependencies may need an `extern crate` here to ensure that they show up
+// in the depinfo file (otherwise cargo thinks they are unused)
+extern crate clippy_lints;
+extern crate clippy_utils;
+extern crate derive_new;
+extern crate futures;
+extern crate if_chain;
+extern crate itertools;
+extern crate parking_lot;
+extern crate quote;
+extern crate syn;
+extern crate tokio;
+
+/// All crates used in UI tests are listed here
+static TEST_DEPENDENCIES: &[&str] = &[
+ "clippy_lints",
+ "clippy_utils",
+ "derive_new",
+ "futures",
+ "if_chain",
+ "itertools",
+ "parking_lot",
+ "quote",
+ "regex",
+ "serde_derive",
+ "serde",
+ "syn",
+ "tokio",
+];
+
+/// Produces a string with an `--extern` flag for all UI test crate
+/// dependencies.
+///
+/// The dependency files are located by parsing the depinfo file for this test
+/// module. This assumes the `-Z binary-dep-depinfo` flag is enabled. All test
+/// dependencies must be added to Cargo.toml at the project root. Test
+/// dependencies that are not *directly* used by this test module require an
+/// `extern crate` declaration.
+static EXTERN_FLAGS: LazyLock<Vec<String>> = LazyLock::new(|| {
+ let current_exe_depinfo = {
+ let mut path = env::current_exe().unwrap();
+ path.set_extension("d");
+ fs::read_to_string(path).unwrap()
+ };
+ let mut crates = BTreeMap::<&str, &str>::new();
+ for line in current_exe_depinfo.lines() {
+ // each dependency is expected to have a Makefile rule like `/path/to/crate-hash.rlib:`
+ let parse_name_path = || {
+ if line.starts_with(char::is_whitespace) {
+ return None;
+ }
+ let path_str = line.strip_suffix(':')?;
+ let path = Path::new(path_str);
+ if !matches!(path.extension()?.to_str()?, "rlib" | "so" | "dylib" | "dll") {
+ return None;
+ }
+ let (name, _hash) = path.file_stem()?.to_str()?.rsplit_once('-')?;
+ // the "lib" prefix is not present for dll files
+ let name = name.strip_prefix("lib").unwrap_or(name);
+ Some((name, path_str))
+ };
+ if let Some((name, path)) = parse_name_path() {
+ if TEST_DEPENDENCIES.contains(&name) {
+ // A dependency may be listed twice if it is available in sysroot,
+ // and the sysroot dependencies are listed first. As of the writing,
+ // this only seems to apply to if_chain.
+ crates.insert(name, path);
+ }
+ }
+ }
+ let not_found: Vec<&str> = TEST_DEPENDENCIES
+ .iter()
+ .copied()
+ .filter(|n| !crates.contains_key(n))
+ .collect();
+ assert!(
+ not_found.is_empty(),
+ "dependencies not found in depinfo: {not_found:?}\n\
+ help: Make sure the `-Z binary-dep-depinfo` rust flag is enabled\n\
+ help: Try adding to dev-dependencies in Cargo.toml\n\
+ help: Be sure to also add `extern crate ...;` to tests/compile-test.rs",
+ );
+ crates
+ .into_iter()
+ .map(|(name, path)| format!("--extern={name}={path}"))
+ .collect()
+});
+
mod test_utils;
// whether to run internal tests or not
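
As a rough illustration of the depinfo parsing that `EXTERN_FLAGS` performs above, this self-contained sketch applies the same rules to a single line. The path and hash are made up for the example; only the `libname-hash.rlib:` shape comes from the comment in the real code.

use std::path::Path;

fn parse_name_path(line: &str) -> Option<(&str, &str)> {
    // Dependency rules start in column 0 and look like `/path/to/libname-hash.rlib:`.
    if line.starts_with(char::is_whitespace) {
        return None;
    }
    let path_str = line.strip_suffix(':')?;
    let path = Path::new(path_str);
    if !matches!(path.extension()?.to_str()?, "rlib" | "so" | "dylib" | "dll") {
        return None;
    }
    let (name, _hash) = path.file_stem()?.to_str()?.rsplit_once('-')?;
    // The "lib" prefix is not present for dll files.
    Some((name.strip_prefix("lib").unwrap_or(name), path_str))
}

fn main() {
    // Hypothetical depinfo line, for illustration only.
    let line = "/tmp/deps/libitertools-0123abcd.rlib:";
    assert_eq!(
        parse_name_path(line),
        Some(("itertools", "/tmp/deps/libitertools-0123abcd.rlib"))
    );
}
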
@@ -24,12 +115,13 @@ fn base_config(test_dir: &str) -> compiletest::Config {
mode: TestMode::Yolo,
stderr_filters: vec![],
stdout_filters: vec![],
- output_conflict_handling: if var_os("BLESS").is_some() || env::args().any(|arg| arg == "--bless") {
- compiletest::OutputConflictHandling::Bless
+ output_conflict_handling: if var_os("RUSTC_BLESS").is_some_and(|v| v != "0")
+ || env::args().any(|arg| arg == "--bless")
+ {
+ OutputConflictHandling::Bless
} else {
- compiletest::OutputConflictHandling::Error("cargo test -- -- --bless".into())
+ OutputConflictHandling::Error("cargo uibless".into())
},
- dependencies_crate_manifest_path: Some("clippy_test_deps/Cargo.toml".into()),
target: None,
out_dir: PathBuf::from(std::env::var_os("CARGO_TARGET_DIR").unwrap_or("target".into())).join("ui_test"),
..compiletest::Config::rustc(Path::new("tests").join(test_dir))
@@ -44,10 +136,24 @@ fn base_config(test_dir: &str) -> compiletest::Config {
let deps_path = current_exe_path.parent().unwrap();
let profile_path = deps_path.parent().unwrap();
- config.program.args.push("--emit=metadata".into());
- config.program.args.push("-Aunused".into());
- config.program.args.push("-Zui-testing".into());
- config.program.args.push("-Dwarnings".into());
+ config.program.args.extend(
+ [
+ "--emit=metadata",
+ "-Aunused",
+ "-Ainternal_features",
+ "-Zui-testing",
+ "-Dwarnings",
+ &format!("-Ldependency={}", deps_path.display()),
+ ]
+ .map(OsString::from),
+ );
+
+ config.program.args.extend(EXTERN_FLAGS.iter().map(OsString::from));
+
+ if let Some(host_libs) = option_env!("HOST_LIBS") {
+ let dep = format!("-Ldependency={}", Path::new(host_libs).join("deps").display());
+ config.program.args.push(dep.into());
+ }
// Normalize away slashes in windows paths.
config.stderr_filter(r"\\", "/");
@@ -84,9 +190,6 @@ fn run_ui() {
.to_string()
}),
);
- eprintln!(" Compiler: {}", config.program.display());
-
- let name = config.root_dir.display().to_string();
let test_filter = test_filter();
@@ -94,7 +197,7 @@ fn run_ui() {
config,
move |path| compiletest::default_file_filter(path) && test_filter(path),
compiletest::default_per_file_config,
- (status_emitter::Text, status_emitter::Gha::<true> { name }),
+ status_emitter::Text,
)
.unwrap();
check_rustfix_coverage();
@@ -106,9 +209,18 @@ fn run_internal_tests() {
return;
}
let mut config = base_config("ui-internal");
- config.dependency_builder.args.push("--features".into());
- config.dependency_builder.args.push("internal".into());
- compiletest::run_tests(config).unwrap();
+ if let OutputConflictHandling::Error(err) = &mut config.output_conflict_handling {
+ *err = "cargo uitest --features internal -- -- --bless".into();
+ }
+ let test_filter = test_filter();
+
+ compiletest::run_tests_generic(
+ config,
+ move |path| compiletest::default_file_filter(path) && test_filter(path),
+ compiletest::default_per_file_config,
+ status_emitter::Text,
+ )
+ .unwrap();
}
fn run_ui_toml() {
@@ -127,13 +239,11 @@ fn run_ui_toml() {
"$$DIR",
);
- let name = config.root_dir.display().to_string();
-
let test_filter = test_filter();
ui_test::run_tests_generic(
config,
- |path| test_filter(path) && path.extension() == Some("rs".as_ref()),
+ |path| compiletest::default_file_filter(path) && test_filter(path),
|config, path| {
let mut config = config.clone();
config
@@ -142,7 +252,7 @@ fn run_ui_toml() {
.push(("CLIPPY_CONF_DIR".into(), Some(path.parent().unwrap().into())));
Some(config)
},
- (status_emitter::Text, status_emitter::Gha::<true> { name }),
+ status_emitter::Text,
)
.unwrap();
}
@@ -183,8 +293,6 @@ fn run_ui_cargo() {
"$$DIR",
);
- let name = config.root_dir.display().to_string();
-
let test_filter = test_filter();
ui_test::run_tests_generic(
@@ -195,7 +303,7 @@ fn run_ui_cargo() {
config.out_dir = PathBuf::from("target/ui_test_cargo/").join(path.parent().unwrap());
Some(config)
},
- (status_emitter::Text, status_emitter::Gha::<true> { name }),
+ status_emitter::Text,
)
.unwrap();
}
@@ -211,12 +319,45 @@ fn main() {
}
set_var("CLIPPY_DISABLE_DOCS_LINKS", "true");
- run_ui();
- run_ui_toml();
- run_ui_cargo();
- run_internal_tests();
- rustfix_coverage_known_exceptions_accuracy();
- ui_cargo_toml_metadata();
+    // The SPEEDTEST_* env variables can be used to check Clippy's performance on your PR. It runs the
+    // selected test suite `SPEEDTEST_ITERATIONS` times (1000 by default) and reports the average runtime.
+ if let Ok(speedtest) = std::env::var("SPEEDTEST") {
+ println!("----------- STARTING SPEEDTEST -----------");
+ let f = match speedtest.as_str() {
+ "ui" => run_ui as fn(),
+ "cargo" => run_ui_cargo as fn(),
+ "toml" => run_ui_toml as fn(),
+ "internal" => run_internal_tests as fn(),
+ "rustfix-coverage-known-exceptions-accuracy" => rustfix_coverage_known_exceptions_accuracy as fn(),
+ "ui-cargo-toml-metadata" => ui_cargo_toml_metadata as fn(),
+
+ _ => panic!("unknown speedtest: {speedtest} || accepted speedtests are: [ui, cargo, toml, internal]"),
+ };
+
+ let iterations;
+ if let Ok(iterations_str) = std::env::var("SPEEDTEST_ITERATIONS") {
+ iterations = iterations_str
+ .parse::<u64>()
+ .unwrap_or_else(|_| panic!("Couldn't parse `{iterations_str}`, please use a valid u64"));
+ } else {
+ iterations = 1000;
+ }
+
+ let mut sum = 0;
+ for _ in 0..iterations {
+ let start = std::time::Instant::now();
+ f();
+ sum += start.elapsed().as_millis();
+ }
+        println!("average {} time: {} millis.", speedtest.to_uppercase(), sum / u128::from(iterations));
+ } else {
+ run_ui();
+ run_ui_toml();
+ run_ui_cargo();
+ run_internal_tests();
+ rustfix_coverage_known_exceptions_accuracy();
+ ui_cargo_toml_metadata();
+ }
}
const RUSTFIX_COVERAGE_KNOWN_EXCEPTIONS: &[&str] = &[
diff --git a/src/tools/clippy/tests/integration.rs b/src/tools/clippy/tests/integration.rs
index a771d8b87..031982edb 100644
--- a/src/tools/clippy/tests/integration.rs
+++ b/src/tools/clippy/tests/integration.rs
@@ -65,6 +65,30 @@ fn integration_test() {
.expect("unable to run clippy");
let stderr = String::from_utf8_lossy(&output.stderr);
+
+ // debug:
+ eprintln!("{stderr}");
+
+ // this is an internal test to make sure we would correctly panic on a delay_span_bug
+ if repo_name == "matthiaskrgr/clippy_ci_panic_test" {
+ // we need to kind of switch around our logic here:
+        // if we find a panic, everything is fine; if we don't panic, SOMETHING is broken about our testing
+
+ // the repo basically just contains a delay_span_bug that forces rustc/clippy to panic:
+ /*
+ #![feature(rustc_attrs)]
+ #[rustc_error(delay_span_bug_from_inside_query)]
+ fn main() {}
+ */
+
+ if stderr.find("error: internal compiler error").is_some() {
+ eprintln!("we saw that we intentionally panicked, yay");
+ return;
+ }
+
+ panic!("panic caused by delay_span_bug was NOT detected! Something is broken!");
+ }
+
if let Some(backtrace_start) = stderr.find("error: internal compiler error") {
static BACKTRACE_END_MSG: &str = "end of query stack";
let backtrace_end = stderr[backtrace_start..]
diff --git a/src/tools/clippy/tests/lint_message_convention.rs b/src/tools/clippy/tests/lint_message_convention.rs
index 15e5cdd69..98019c755 100644
--- a/src/tools/clippy/tests/lint_message_convention.rs
+++ b/src/tools/clippy/tests/lint_message_convention.rs
@@ -18,18 +18,20 @@ impl Message {
fn new(path: PathBuf) -> Self {
// we don't want the first letter after "error: ", "help: " ... to be capitalized
// also no punctuation (except for "?" ?) at the end of a line
+ // Prefer "try" over "try this".
static REGEX_SET: LazyLock<RegexSet> = LazyLock::new(|| {
RegexSet::new([
"error: [A-Z]",
"help: [A-Z]",
"warning: [A-Z]",
"note: [A-Z]",
- "try this: [A-Z]",
+ "try: [A-Z]",
"error: .*[.!]$",
"help: .*[.!]$",
"warning: .*[.!]$",
"note: .*[.!]$",
- "try this: .*[.!]$",
+ "try: .*[.!]$",
+ "try this",
])
.unwrap()
});
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/src/main.rs
index 1a69bb241..c67166fc4 100644
--- a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail/src/main.rs
@@ -1,4 +1,3 @@
-//@compile-flags: --crate-name=cargo_common_metadata
#![warn(clippy::cargo_common_metadata)]
fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/src/main.rs
index 1a69bb241..c67166fc4 100644
--- a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish/src/main.rs
@@ -1,4 +1,3 @@
-//@compile-flags: --crate-name=cargo_common_metadata
#![warn(clippy::cargo_common_metadata)]
fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/src/main.rs
index 1a69bb241..c67166fc4 100644
--- a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/fail_publish_true/src/main.rs
@@ -1,4 +1,3 @@
-//@compile-flags: --crate-name=cargo_common_metadata
#![warn(clippy::cargo_common_metadata)]
fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/src/main.rs
index 1a69bb241..c67166fc4 100644
--- a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass/src/main.rs
@@ -1,4 +1,3 @@
-//@compile-flags: --crate-name=cargo_common_metadata
#![warn(clippy::cargo_common_metadata)]
fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_empty/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_empty/src/main.rs
index 1a69bb241..c67166fc4 100644
--- a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_empty/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_empty/src/main.rs
@@ -1,4 +1,3 @@
-//@compile-flags: --crate-name=cargo_common_metadata
#![warn(clippy::cargo_common_metadata)]
fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_false/src/main.rs b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_false/src/main.rs
index 1a69bb241..c67166fc4 100644
--- a/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_false/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/cargo_common_metadata/pass_publish_false/src/main.rs
@@ -1,4 +1,3 @@
-//@compile-flags: --crate-name=cargo_common_metadata
#![warn(clippy::cargo_common_metadata)]
fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/feature_name/fail/src/main.rs b/src/tools/clippy/tests/ui-cargo/feature_name/fail/src/main.rs
index 4dd9582af..74e40c09e 100644
--- a/src/tools/clippy/tests/ui-cargo/feature_name/fail/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/feature_name/fail/src/main.rs
@@ -1,4 +1,3 @@
-//@compile-flags: --crate-name=feature_name
#![warn(clippy::redundant_feature_names)]
#![warn(clippy::negative_feature_names)]
diff --git a/src/tools/clippy/tests/ui-cargo/feature_name/pass/src/main.rs b/src/tools/clippy/tests/ui-cargo/feature_name/pass/src/main.rs
index 4dd9582af..74e40c09e 100644
--- a/src/tools/clippy/tests/ui-cargo/feature_name/pass/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/feature_name/pass/src/main.rs
@@ -1,4 +1,3 @@
-//@compile-flags: --crate-name=feature_name
#![warn(clippy::redundant_feature_names)]
#![warn(clippy::negative_feature_names)]
diff --git a/src/tools/clippy/tests/ui-cargo/module_style/fail_mod_remap/src/main.rs b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod_remap/src/main.rs
index c70d92e35..ac21b3a44 100644
--- a/src/tools/clippy/tests/ui-cargo/module_style/fail_mod_remap/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/module_style/fail_mod_remap/src/main.rs
@@ -1,3 +1,4 @@
+// FIXME: find a way to add rustflags to ui-cargo tests
//@compile-flags: --remap-path-prefix {{src-base}}=/remapped
#![warn(clippy::self_named_module_files)]
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/5041_allow_dev_build/src/main.rs b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/5041_allow_dev_build/src/main.rs
index ece260b74..4bc61dd62 100644
--- a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/5041_allow_dev_build/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/5041_allow_dev_build/src/main.rs
@@ -1,4 +1,3 @@
-//@compile-flags: --crate-name=multiple_crate_versions
#![warn(clippy::multiple_crate_versions)]
fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/src/main.rs b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/src/main.rs
index ece260b74..4bc61dd62 100644
--- a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/fail/src/main.rs
@@ -1,4 +1,3 @@
-//@compile-flags: --crate-name=multiple_crate_versions
#![warn(clippy::multiple_crate_versions)]
fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/pass/src/main.rs b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/pass/src/main.rs
index ece260b74..4bc61dd62 100644
--- a/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/pass/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/multiple_crate_versions/pass/src/main.rs
@@ -1,4 +1,3 @@
-//@compile-flags: --crate-name=multiple_crate_versions
#![warn(clippy::multiple_crate_versions)]
fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/src/main.rs b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/src/main.rs
index bb3a39d07..3491ccb0d 100644
--- a/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/fail/src/main.rs
@@ -1,4 +1,3 @@
-//@compile-flags: --crate-name=wildcard_dependencies
#![warn(clippy::wildcard_dependencies)]
fn main() {}
diff --git a/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/pass/src/main.rs b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/pass/src/main.rs
index bb3a39d07..3491ccb0d 100644
--- a/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/pass/src/main.rs
+++ b/src/tools/clippy/tests/ui-cargo/wildcard_dependencies/pass/src/main.rs
@@ -1,4 +1,3 @@
-//@compile-flags: --crate-name=wildcard_dependencies
#![warn(clippy::wildcard_dependencies)]
fn main() {}
diff --git a/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr b/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr
index b88aeae2a..d8b158816 100644
--- a/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr
+++ b/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr
@@ -1,9 +1,10 @@
-thread '<unnamed>' panicked at 'Would you like some help with that?', clippy_lints/src/utils/internal_lints/produce_ice.rs
+thread '<unnamed>' panicked at clippy_lints/src/utils/internal_lints/produce_ice.rs:
+Would you like some help with that?
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
error: the compiler unexpectedly panicked. this is a bug.
-note: we would appreciate a bug report: https://github.com/rust-lang/rust-clippy/issues/new
+note: we would appreciate a bug report: https://github.com/rust-lang/rust-clippy/issues/new?template=ice.yml
note: rustc <version> running on <target>
diff --git a/src/tools/clippy/tests/ui-toml/absolute_paths/absolute_paths.allow_crates.stderr b/src/tools/clippy/tests/ui-toml/absolute_paths/absolute_paths.allow_crates.stderr
new file mode 100644
index 000000000..a8900da4e
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/absolute_paths/absolute_paths.allow_crates.stderr
@@ -0,0 +1,28 @@
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:40:5
+ |
+LL | std::f32::MAX;
+ | ^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::absolute-paths` implied by `-D warnings`
+
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:41:5
+ |
+LL | core::f32::MAX;
+ | ^^^^^^^^^^^^^^
+
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:42:5
+ |
+LL | ::core::f32::MAX;
+ | ^^^^^^^^^^^^^^^^
+
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:58:5
+ |
+LL | ::std::f32::MAX;
+ | ^^^^^^^^^^^^^^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/absolute_paths/absolute_paths.disallow_crates.stderr b/src/tools/clippy/tests/ui-toml/absolute_paths/absolute_paths.disallow_crates.stderr
new file mode 100644
index 000000000..41b70644b
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/absolute_paths/absolute_paths.disallow_crates.stderr
@@ -0,0 +1,70 @@
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:40:5
+ |
+LL | std::f32::MAX;
+ | ^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::absolute-paths` implied by `-D warnings`
+
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:41:5
+ |
+LL | core::f32::MAX;
+ | ^^^^^^^^^^^^^^
+
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:42:5
+ |
+LL | ::core::f32::MAX;
+ | ^^^^^^^^^^^^^^^^
+
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:43:5
+ |
+LL | crate::a::b::c::C;
+ | ^^^^^^^^^^^^^^^^^
+
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:44:5
+ |
+LL | crate::a::b::c::d::e::f::F;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:45:5
+ |
+LL | crate::a::A;
+ | ^^^^^^^^^^^
+
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:46:5
+ |
+LL | crate::a::b::B;
+ | ^^^^^^^^^^^^^^
+
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:47:5
+ |
+LL | crate::a::b::c::C::ZERO;
+ | ^^^^^^^^^^^^^^^^^
+
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:48:5
+ |
+LL | helper::b::c::d::e::f();
+ | ^^^^^^^^^^^^^^^^^^^^^
+
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:49:5
+ |
+LL | ::helper::b::c::d::e::f();
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+
+error: consider bringing this path into scope with the `use` keyword
+ --> $DIR/absolute_paths.rs:58:5
+ |
+LL | ::std::f32::MAX;
+ | ^^^^^^^^^^^^^^^
+
+error: aborting due to 11 previous errors
+
diff --git a/src/tools/clippy/tests/ui-toml/absolute_paths/absolute_paths.rs b/src/tools/clippy/tests/ui-toml/absolute_paths/absolute_paths.rs
new file mode 100644
index 000000000..d4c250a8f
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/absolute_paths/absolute_paths.rs
@@ -0,0 +1,97 @@
+//@aux-build:../../ui/auxiliary/proc_macros.rs:proc-macro
+//@aux-build:helper.rs
+//@revisions: allow_crates disallow_crates
+//@[allow_crates] rustc-env:CLIPPY_CONF_DIR=tests/ui-toml/absolute_paths/allow_crates
+//@[disallow_crates] rustc-env:CLIPPY_CONF_DIR=tests/ui-toml/absolute_paths/disallow_crates
+#![allow(clippy::no_effect, unused)]
+#![warn(clippy::absolute_paths)]
+#![feature(decl_macro)]
+
+extern crate helper;
+#[macro_use]
+extern crate proc_macros;
+
+pub mod a {
+ pub mod b {
+ pub mod c {
+ pub struct C;
+
+ impl C {
+ pub const ZERO: u32 = 0;
+ }
+
+ pub mod d {
+ pub mod e {
+ pub mod f {
+ pub struct F;
+ }
+ }
+ }
+ }
+
+ pub struct B;
+ }
+
+ pub struct A;
+}
+
+fn main() {
+ f32::max(1.0, 2.0);
+ std::f32::MAX;
+ core::f32::MAX;
+ ::core::f32::MAX;
+ crate::a::b::c::C;
+ crate::a::b::c::d::e::f::F;
+ crate::a::A;
+ crate::a::b::B;
+ crate::a::b::c::C::ZERO;
+ helper::b::c::d::e::f();
+ ::helper::b::c::d::e::f();
+ fn b() -> a::b::B {
+ todo!()
+ }
+ std::println!("a");
+ let x = 1;
+ std::ptr::addr_of!(x);
+ // Test we handle max segments with `PathRoot` properly; this has 4 segments but we should say it
+ // has 3
+ ::std::f32::MAX;
+ // Do not lint due to the above
+ ::helper::a();
+ // Do not lint
+ helper::a();
+ use crate::a::b::c::C;
+ use a::b;
+ use std::f32::MAX;
+ a::b::c::d::e::f::F;
+ b::c::C;
+ fn a() -> a::A {
+ todo!()
+ }
+ use a::b::c;
+
+ fn c() -> c::C {
+ todo!()
+ }
+ fn d() -> Result<(), ()> {
+ todo!()
+ }
+ external! {
+ crate::a::b::c::C::ZERO;
+ }
+ // For some reason, `path.span.from_expansion()` takes care of this for us
+ with_span! {
+ span
+ crate::a::b::c::C::ZERO;
+ }
+ macro_rules! local_crate {
+ () => {
+ crate::a::b::c::C::ZERO;
+ };
+ }
+ macro local_crate_2_0() {
+ crate::a::b::c::C::ZERO;
+ }
+ local_crate!();
+ local_crate_2_0!();
+}
diff --git a/src/tools/clippy/tests/ui-toml/absolute_paths/allow_crates/clippy.toml b/src/tools/clippy/tests/ui-toml/absolute_paths/allow_crates/clippy.toml
new file mode 100644
index 000000000..59a621e9d
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/absolute_paths/allow_crates/clippy.toml
@@ -0,0 +1,2 @@
+absolute-paths-max-segments = 2
+absolute-paths-allowed-crates = ["crate", "helper"]
diff --git a/src/tools/clippy/tests/ui-toml/absolute_paths/auxiliary/helper.rs b/src/tools/clippy/tests/ui-toml/absolute_paths/auxiliary/helper.rs
new file mode 100644
index 000000000..8e2678f5f
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/absolute_paths/auxiliary/helper.rs
@@ -0,0 +1,11 @@
+pub fn a() {}
+
+pub mod b {
+ pub mod c {
+ pub mod d {
+ pub mod e {
+ pub fn f() {}
+ }
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui-toml/absolute_paths/disallow_crates/clippy.toml b/src/tools/clippy/tests/ui-toml/absolute_paths/disallow_crates/clippy.toml
new file mode 100644
index 000000000..d44d648c6
--- /dev/null
+++ b/src/tools/clippy/tests/ui-toml/absolute_paths/disallow_crates/clippy.toml
@@ -0,0 +1 @@
+absolute-paths-max-segments = 2
diff --git a/src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr b/src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr
index 6ec79a618..eb1180e60 100644
--- a/src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr
+++ b/src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr
@@ -30,7 +30,7 @@ LL | println!("Hello {} is {:.*}", "x", local_i32, local_f64);
| ^^^
|
= note: `-D clippy::print-literal` implied by `-D warnings`
-help: try this
+help: try
|
LL - println!("Hello {} is {:.*}", "x", local_i32, local_f64);
LL + println!("Hello x is {:.*}", local_i32, local_f64);
diff --git a/src/tools/clippy/tests/ui-toml/excessive_nesting/auxiliary/proc_macros.rs b/src/tools/clippy/tests/ui-toml/excessive_nesting/auxiliary/proc_macros.rs
index ebadd4e44..60fbaaea3 100644
--- a/src/tools/clippy/tests/ui-toml/excessive_nesting/auxiliary/proc_macros.rs
+++ b/src/tools/clippy/tests/ui-toml/excessive_nesting/auxiliary/proc_macros.rs
@@ -7,13 +7,10 @@
extern crate proc_macro;
use core::mem;
-use proc_macro::{
- token_stream::IntoIter,
- Delimiter::{self, Brace, Parenthesis},
- Group, Ident, Literal, Punct,
- Spacing::{self, Alone, Joint},
- Span, TokenStream, TokenTree as TT,
-};
+use proc_macro::token_stream::IntoIter;
+use proc_macro::Delimiter::{self, Brace, Parenthesis};
+use proc_macro::Spacing::{self, Alone, Joint};
+use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree as TT};
type Result<T> = core::result::Result<T, TokenStream>;
diff --git a/src/tools/clippy/tests/ui-toml/expect_used/expect_used.stderr b/src/tools/clippy/tests/ui-toml/expect_used/expect_used.stderr
index 9eef0e1bf..815d00935 100644
--- a/src/tools/clippy/tests/ui-toml/expect_used/expect_used.stderr
+++ b/src/tools/clippy/tests/ui-toml/expect_used/expect_used.stderr
@@ -4,7 +4,7 @@ error: used `expect()` on an `Option` value
LL | let _ = opt.expect("");
| ^^^^^^^^^^^^^^
|
- = help: if this value is `None`, it will panic
+ = note: if this value is `None`, it will panic
= note: `-D clippy::expect-used` implied by `-D warnings`
error: used `expect()` on a `Result` value
@@ -13,7 +13,7 @@ error: used `expect()` on a `Result` value
LL | let _ = res.expect("");
| ^^^^^^^^^^^^^^
|
- = help: if this value is an `Err`, it will panic
+ = note: if this value is an `Err`, it will panic
error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.rs b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.rs
index f267a67f4..78784bfff 100644
--- a/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.rs
+++ b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.rs
@@ -2,6 +2,7 @@
//@normalize-stderr-test: "\(limit: \d+ byte\)" -> "(limit: N byte)"
#![warn(clippy::trivially_copy_pass_by_ref)]
+#![allow(clippy::needless_pass_by_ref_mut)]
#[derive(Copy, Clone)]
struct Foo(u8);
diff --git a/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.stderr b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.stderr
index d2b55eff1..db5d68053 100644
--- a/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.stderr
+++ b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.stderr
@@ -1,5 +1,5 @@
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/test.rs:14:11
+ --> $DIR/test.rs:15:11
|
LL | fn bad(x: &u16, y: &Foo) {}
| ^^^^ help: consider passing by value instead: `u16`
@@ -7,7 +7,7 @@ LL | fn bad(x: &u16, y: &Foo) {}
= note: `-D clippy::trivially-copy-pass-by-ref` implied by `-D warnings`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/test.rs:14:20
+ --> $DIR/test.rs:15:20
|
LL | fn bad(x: &u16, y: &Foo) {}
| ^^^^ help: consider passing by value instead: `Foo`
diff --git a/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr
index 6ba26e977..cdabe6460 100644
--- a/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr
+++ b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr
@@ -1,4 +1,6 @@
error: error reading Clippy's configuration file: unknown field `foobar`, expected one of
+ absolute-paths-allowed-crates
+ absolute-paths-max-segments
accept-comment-above-attributes
accept-comment-above-statement
allow-dbg-in-tests
@@ -68,6 +70,8 @@ LL | foobar = 42
| ^^^^^^
error: error reading Clippy's configuration file: unknown field `barfoo`, expected one of
+ absolute-paths-allowed-crates
+ absolute-paths-max-segments
accept-comment-above-attributes
accept-comment-above-statement
allow-dbg-in-tests
diff --git a/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.rs b/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.rs
index dde1c6d7c..e300ba18c 100644
--- a/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.rs
+++ b/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.rs
@@ -9,9 +9,7 @@
#![warn(clippy::unwrap_used)]
#![warn(clippy::get_unwrap)]
-use std::collections::BTreeMap;
-use std::collections::HashMap;
-use std::collections::VecDeque;
+use std::collections::{BTreeMap, HashMap, VecDeque};
struct GetFalsePositive {
arr: [u32; 3],
diff --git a/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.stderr b/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.stderr
index eb66a5cf5..10219beaf 100644
--- a/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.stderr
+++ b/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.stderr
@@ -1,199 +1,212 @@
error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:40:17
+ --> $DIR/unwrap_used.rs:38:17
|
LL | let _ = boxed_slice.get(1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&boxed_slice[1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&boxed_slice[1]`
|
= note: `-D clippy::get-unwrap` implied by `-D warnings`
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_used.rs:40:17
+ --> $DIR/unwrap_used.rs:38:17
|
LL | let _ = boxed_slice.get(1).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
= note: `-D clippy::unwrap-used` implied by `-D warnings`
error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:41:17
+ --> $DIR/unwrap_used.rs:39:17
|
LL | let _ = some_slice.get(0).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_slice[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_slice[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_used.rs:41:17
+ --> $DIR/unwrap_used.rs:39:17
|
LL | let _ = some_slice.get(0).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a Vec. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:42:17
+ --> $DIR/unwrap_used.rs:40:17
|
LL | let _ = some_vec.get(0).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_vec[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_vec[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_used.rs:42:17
+ --> $DIR/unwrap_used.rs:40:17
|
LL | let _ = some_vec.get(0).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a VecDeque. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:43:17
+ --> $DIR/unwrap_used.rs:41:17
|
LL | let _ = some_vecdeque.get(0).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_vecdeque[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_vecdeque[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_used.rs:43:17
+ --> $DIR/unwrap_used.rs:41:17
|
LL | let _ = some_vecdeque.get(0).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a HashMap. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:44:17
+ --> $DIR/unwrap_used.rs:42:17
|
LL | let _ = some_hashmap.get(&1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_hashmap[&1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_hashmap[&1]`
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_used.rs:44:17
+ --> $DIR/unwrap_used.rs:42:17
|
LL | let _ = some_hashmap.get(&1).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a BTreeMap. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:45:17
+ --> $DIR/unwrap_used.rs:43:17
|
LL | let _ = some_btreemap.get(&1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_btreemap[&1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_btreemap[&1]`
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_used.rs:45:17
+ --> $DIR/unwrap_used.rs:43:17
|
LL | let _ = some_btreemap.get(&1).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:49:21
+ --> $DIR/unwrap_used.rs:47:21
|
LL | let _: u8 = *boxed_slice.get(1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `boxed_slice[1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `boxed_slice[1]`
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_used.rs:49:22
+ --> $DIR/unwrap_used.rs:47:22
|
LL | let _: u8 = *boxed_slice.get(1).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get_mut().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:54:9
+ --> $DIR/unwrap_used.rs:52:9
|
LL | *boxed_slice.get_mut(0).unwrap() = 1;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `boxed_slice[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `boxed_slice[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_used.rs:54:10
+ --> $DIR/unwrap_used.rs:52:10
|
LL | *boxed_slice.get_mut(0).unwrap() = 1;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get_mut().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:55:9
+ --> $DIR/unwrap_used.rs:53:9
|
LL | *some_slice.get_mut(0).unwrap() = 1;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_slice[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_slice[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_used.rs:55:10
+ --> $DIR/unwrap_used.rs:53:10
|
LL | *some_slice.get_mut(0).unwrap() = 1;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get_mut().unwrap()` on a Vec. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:56:9
+ --> $DIR/unwrap_used.rs:54:9
|
LL | *some_vec.get_mut(0).unwrap() = 1;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vec[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_used.rs:56:10
+ --> $DIR/unwrap_used.rs:54:10
|
LL | *some_vec.get_mut(0).unwrap() = 1;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get_mut().unwrap()` on a VecDeque. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:57:9
+ --> $DIR/unwrap_used.rs:55:9
|
LL | *some_vecdeque.get_mut(0).unwrap() = 1;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vecdeque[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vecdeque[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_used.rs:57:10
+ --> $DIR/unwrap_used.rs:55:10
|
LL | *some_vecdeque.get_mut(0).unwrap() = 1;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a Vec. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:69:17
+ --> $DIR/unwrap_used.rs:67:17
|
LL | let _ = some_vec.get(0..1).unwrap().to_vec();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0..1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vec[0..1]`
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_used.rs:69:17
+ --> $DIR/unwrap_used.rs:67:17
|
LL | let _ = some_vec.get(0..1).unwrap().to_vec();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get_mut().unwrap()` on a Vec. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:70:17
+ --> $DIR/unwrap_used.rs:68:17
|
LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0..1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vec[0..1]`
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_used.rs:70:17
+ --> $DIR/unwrap_used.rs:68:17
|
LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:77:13
+ --> $DIR/unwrap_used.rs:75:13
|
LL | let _ = boxed_slice.get(1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&boxed_slice[1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&boxed_slice[1]`
error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/unwrap_used.rs:95:17
+ --> $DIR/unwrap_used.rs:93:17
|
LL | let _ = Box::new([0]).get(1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&Box::new([0])[1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&Box::new([0])[1]`
error: aborting due to 28 previous errors
diff --git a/src/tools/clippy/tests/ui/arc_with_non_send_sync.rs b/src/tools/clippy/tests/ui/arc_with_non_send_sync.rs
index b6fcca0a7..2940c2732 100644
--- a/src/tools/clippy/tests/ui/arc_with_non_send_sync.rs
+++ b/src/tools/clippy/tests/ui/arc_with_non_send_sync.rs
@@ -1,6 +1,12 @@
+//@aux-build:proc_macros.rs:proc-macro
#![warn(clippy::arc_with_non_send_sync)]
#![allow(unused_variables)]
+
+#[macro_use]
+extern crate proc_macros;
+
use std::cell::RefCell;
+use std::ptr::{null, null_mut};
use std::sync::{Arc, Mutex};
fn foo<T>(x: T) {
@@ -11,14 +17,32 @@ fn issue11076<T>() {
let a: Arc<Vec<T>> = Arc::new(Vec::new());
}
+fn issue11232() {
+ external! {
+ let a: Arc<*const u8> = Arc::new(null());
+ let a: Arc<*mut u8> = Arc::new(null_mut());
+ }
+ with_span! {
+ span
+ let a: Arc<*const u8> = Arc::new(null());
+ let a: Arc<*mut u8> = Arc::new(null_mut());
+ }
+}
+
fn main() {
let _ = Arc::new(42);
- // !Sync
let _ = Arc::new(RefCell::new(42));
+ //~^ ERROR: usage of an `Arc` that is not `Send` or `Sync`
+ //~| NOTE: the trait `Sync` is not implemented for `RefCell<i32>`
+
let mutex = Mutex::new(1);
- // !Send
let _ = Arc::new(mutex.lock().unwrap());
- // !Send + !Sync
+ //~^ ERROR: usage of an `Arc` that is not `Send` or `Sync`
+ //~| NOTE: the trait `Send` is not implemented for `MutexGuard<'_, i32>`
+
let _ = Arc::new(&42 as *const i32);
+ //~^ ERROR: usage of an `Arc` that is not `Send` or `Sync`
+ //~| NOTE: the trait `Send` is not implemented for `*const i32`
+ //~| NOTE: the trait `Sync` is not implemented for `*const i32`
}
diff --git a/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr b/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr
index 7633b38df..de3f2fb9e 100644
--- a/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr
+++ b/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr
@@ -1,5 +1,5 @@
error: usage of an `Arc` that is not `Send` or `Sync`
- --> $DIR/arc_with_non_send_sync.rs:18:13
+ --> $DIR/arc_with_non_send_sync.rs:35:13
|
LL | let _ = Arc::new(RefCell::new(42));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -10,7 +10,7 @@ LL | let _ = Arc::new(RefCell::new(42));
= note: `-D clippy::arc-with-non-send-sync` implied by `-D warnings`
error: usage of an `Arc` that is not `Send` or `Sync`
- --> $DIR/arc_with_non_send_sync.rs:21:13
+ --> $DIR/arc_with_non_send_sync.rs:40:13
|
LL | let _ = Arc::new(mutex.lock().unwrap());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -20,7 +20,7 @@ LL | let _ = Arc::new(mutex.lock().unwrap());
= help: consider using an `Rc` instead or wrapping the inner type with a `Mutex`
error: usage of an `Arc` that is not `Send` or `Sync`
- --> $DIR/arc_with_non_send_sync.rs:23:13
+ --> $DIR/arc_with_non_send_sync.rs:44:13
|
LL | let _ = Arc::new(&42 as *const i32);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/arithmetic_side_effects.rs b/src/tools/clippy/tests/ui/arithmetic_side_effects.rs
index 4f38e50c8..2ac2fa220 100644
--- a/src/tools/clippy/tests/ui/arithmetic_side_effects.rs
+++ b/src/tools/clippy/tests/ui/arithmetic_side_effects.rs
@@ -481,4 +481,16 @@ pub fn issue_10792() {
let _ = 10 / TWO.c;
}
+pub fn issue_11145() {
+ let mut x: Wrapping<u32> = Wrapping(0_u32);
+ x += 1;
+}
+
+pub fn issue_11262() {
+ let one = 1;
+ let zero = 0;
+ let _ = 2 / one;
+ let _ = 2 / zero;
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/as_conversions.rs b/src/tools/clippy/tests/ui/as_conversions.rs
index 427842a51..69f1c541c 100644
--- a/src/tools/clippy/tests/ui/as_conversions.rs
+++ b/src/tools/clippy/tests/ui/as_conversions.rs
@@ -4,8 +4,7 @@
#![allow(clippy::borrow_as_ptr, unused)]
extern crate proc_macros;
-use proc_macros::external;
-use proc_macros::with_span;
+use proc_macros::{external, with_span};
fn main() {
let i = 0u32 as u64;
diff --git a/src/tools/clippy/tests/ui/as_conversions.stderr b/src/tools/clippy/tests/ui/as_conversions.stderr
index ca41d1378..54037a649 100644
--- a/src/tools/clippy/tests/ui/as_conversions.stderr
+++ b/src/tools/clippy/tests/ui/as_conversions.stderr
@@ -1,5 +1,5 @@
error: using a potentially dangerous silent `as` conversion
- --> $DIR/as_conversions.rs:11:13
+ --> $DIR/as_conversions.rs:10:13
|
LL | let i = 0u32 as u64;
| ^^^^^^^^^^^
@@ -8,7 +8,7 @@ LL | let i = 0u32 as u64;
= note: `-D clippy::as-conversions` implied by `-D warnings`
error: using a potentially dangerous silent `as` conversion
- --> $DIR/as_conversions.rs:13:13
+ --> $DIR/as_conversions.rs:12:13
|
LL | let j = &i as *const u64 as *mut u64;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -16,7 +16,7 @@ LL | let j = &i as *const u64 as *mut u64;
= help: consider using a safe wrapper for this conversion
error: using a potentially dangerous silent `as` conversion
- --> $DIR/as_conversions.rs:13:13
+ --> $DIR/as_conversions.rs:12:13
|
LL | let j = &i as *const u64 as *mut u64;
| ^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs b/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs
index cab216b51..f20df6f0f 100644
--- a/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs
+++ b/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs
@@ -15,8 +15,7 @@ pub mod inner {
// RE-EXPORT
// this will stick in `inner` module
- pub use macro_rules::mut_mut;
- pub use macro_rules::try_err;
+ pub use macro_rules::{mut_mut, try_err};
pub mod nested {
pub use macro_rules::string_add;
diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macro_attr.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macro_attr.rs
index fdfe5fc41..c58795575 100644
--- a/src/tools/clippy/tests/ui/auxiliary/proc_macro_attr.rs
+++ b/src/tools/clippy/tests/ui/auxiliary/proc_macro_attr.rs
@@ -8,11 +8,11 @@ extern crate syn;
use proc_macro::TokenStream;
use quote::{quote, quote_spanned};
-use syn::parse_macro_input;
use syn::spanned::Spanned;
use syn::token::Star;
use syn::{
- parse_quote, FnArg, ImplItem, ItemImpl, ItemTrait, Lifetime, Pat, PatIdent, PatType, Signature, TraitItem, Type,
+ parse_macro_input, parse_quote, FnArg, ImplItem, ItemImpl, ItemTrait, Lifetime, Pat, PatIdent, PatType, Signature,
+ TraitItem, Type,
};
#[proc_macro_attribute]
diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs
index 4d008c8cb..43df65438 100644
--- a/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs
+++ b/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs
@@ -5,13 +5,10 @@
extern crate proc_macro;
use core::mem;
-use proc_macro::{
- token_stream::IntoIter,
- Delimiter::{self, Brace, Parenthesis},
- Group, Ident, Literal, Punct,
- Spacing::{self, Alone, Joint},
- Span, TokenStream, TokenTree as TT,
-};
+use proc_macro::token_stream::IntoIter;
+use proc_macro::Delimiter::{self, Brace, Parenthesis};
+use proc_macro::Spacing::{self, Alone, Joint};
+use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree as TT};
type Result<T> = core::result::Result<T, TokenStream>;
diff --git a/src/tools/clippy/tests/ui/bind_instead_of_map.stderr b/src/tools/clippy/tests/ui/bind_instead_of_map.stderr
index b6a81d21b..f17fee746 100644
--- a/src/tools/clippy/tests/ui/bind_instead_of_map.stderr
+++ b/src/tools/clippy/tests/ui/bind_instead_of_map.stderr
@@ -14,7 +14,7 @@ error: using `Option.and_then(|x| Some(y))`, which is more succinctly expressed
--> $DIR/bind_instead_of_map.rs:10:13
|
LL | let _ = x.and_then(|o| Some(o + 1));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `x.map(|o| o + 1)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.map(|o| o + 1)`
error: using `Result.and_then(Ok)`, which is a no-op
--> $DIR/bind_instead_of_map.rs:16:13
diff --git a/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.stderr b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.stderr
index 0152a93fe..cedbca785 100644
--- a/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.stderr
+++ b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.stderr
@@ -9,7 +9,7 @@ note: the lint level is defined here
|
LL | #![deny(clippy::bind_instead_of_map)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
-help: try this
+help: try
|
LL | let _ = Some("42").map(|s| if s.len() < 42 { 0 } else { s.len() });
| ~~~ ~ ~~~~~~~
@@ -20,7 +20,7 @@ error: using `Result.and_then(|x| Ok(y))`, which is more succinctly expressed as
LL | let _ = Ok::<_, ()>("42").and_then(|s| if s.len() < 42 { Ok(0) } else { Ok(s.len()) });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
-help: try this
+help: try
|
LL | let _ = Ok::<_, ()>("42").map(|s| if s.len() < 42 { 0 } else { s.len() });
| ~~~ ~ ~~~~~~~
@@ -31,7 +31,7 @@ error: using `Result.or_else(|x| Err(y))`, which is more succinctly expressed as
LL | let _ = Err::<(), _>("42").or_else(|s| if s.len() < 42 { Err(s.len() + 20) } else { Err(s.len()) });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
-help: try this
+help: try
|
LL | let _ = Err::<(), _>("42").map_err(|s| if s.len() < 42 { s.len() + 20 } else { s.len() });
| ~~~~~~~ ~~~~~~~~~~~~ ~~~~~~~
@@ -48,7 +48,7 @@ LL | | }
LL | | });
| |______^
|
-help: try this
+help: try
|
LL ~ Some("42").map(|s| {
LL | if {
@@ -82,7 +82,7 @@ error: using `Option.and_then(|x| Some(y))`, which is more succinctly expressed
LL | let _ = Some("").and_then(|s| if s.len() == 20 { Some(m!()) } else { Some(Some(20)) });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
-help: try this
+help: try
|
LL | let _ = Some("").map(|s| if s.len() == 20 { m!() } else { Some(20) });
| ~~~ ~~~~ ~~~~~~~~
diff --git a/src/tools/clippy/tests/ui/bool_comparison.fixed b/src/tools/clippy/tests/ui/bool_comparison.fixed
index d6774c035..8689f89d2 100644
--- a/src/tools/clippy/tests/ui/bool_comparison.fixed
+++ b/src/tools/clippy/tests/ui/bool_comparison.fixed
@@ -2,6 +2,7 @@
#![allow(clippy::needless_if)]
#![warn(clippy::bool_comparison)]
+#![allow(clippy::incorrect_partial_ord_impl_on_ord_type)]
fn main() {
let x = true;
diff --git a/src/tools/clippy/tests/ui/bool_comparison.rs b/src/tools/clippy/tests/ui/bool_comparison.rs
index c0483fd73..a1c94aff9 100644
--- a/src/tools/clippy/tests/ui/bool_comparison.rs
+++ b/src/tools/clippy/tests/ui/bool_comparison.rs
@@ -2,6 +2,7 @@
#![allow(clippy::needless_if)]
#![warn(clippy::bool_comparison)]
+#![allow(clippy::incorrect_partial_ord_impl_on_ord_type)]
fn main() {
let x = true;
diff --git a/src/tools/clippy/tests/ui/bool_comparison.stderr b/src/tools/clippy/tests/ui/bool_comparison.stderr
index f4dded365..19bdf3013 100644
--- a/src/tools/clippy/tests/ui/bool_comparison.stderr
+++ b/src/tools/clippy/tests/ui/bool_comparison.stderr
@@ -1,5 +1,5 @@
error: equality checks against true are unnecessary
- --> $DIR/bool_comparison.rs:8:8
+ --> $DIR/bool_comparison.rs:9:8
|
LL | if x == true {
| ^^^^^^^^^ help: try simplifying it as shown: `x`
@@ -7,127 +7,127 @@ LL | if x == true {
= note: `-D clippy::bool-comparison` implied by `-D warnings`
error: equality checks against false can be replaced by a negation
- --> $DIR/bool_comparison.rs:13:8
+ --> $DIR/bool_comparison.rs:14:8
|
LL | if x == false {
| ^^^^^^^^^^ help: try simplifying it as shown: `!x`
error: equality checks against true are unnecessary
- --> $DIR/bool_comparison.rs:18:8
+ --> $DIR/bool_comparison.rs:19:8
|
LL | if true == x {
| ^^^^^^^^^ help: try simplifying it as shown: `x`
error: equality checks against false can be replaced by a negation
- --> $DIR/bool_comparison.rs:23:8
+ --> $DIR/bool_comparison.rs:24:8
|
LL | if false == x {
| ^^^^^^^^^^ help: try simplifying it as shown: `!x`
error: inequality checks against true can be replaced by a negation
- --> $DIR/bool_comparison.rs:28:8
+ --> $DIR/bool_comparison.rs:29:8
|
LL | if x != true {
| ^^^^^^^^^ help: try simplifying it as shown: `!x`
error: inequality checks against false are unnecessary
- --> $DIR/bool_comparison.rs:33:8
+ --> $DIR/bool_comparison.rs:34:8
|
LL | if x != false {
| ^^^^^^^^^^ help: try simplifying it as shown: `x`
error: inequality checks against true can be replaced by a negation
- --> $DIR/bool_comparison.rs:38:8
+ --> $DIR/bool_comparison.rs:39:8
|
LL | if true != x {
| ^^^^^^^^^ help: try simplifying it as shown: `!x`
error: inequality checks against false are unnecessary
- --> $DIR/bool_comparison.rs:43:8
+ --> $DIR/bool_comparison.rs:44:8
|
LL | if false != x {
| ^^^^^^^^^^ help: try simplifying it as shown: `x`
error: less than comparison against true can be replaced by a negation
- --> $DIR/bool_comparison.rs:48:8
+ --> $DIR/bool_comparison.rs:49:8
|
LL | if x < true {
| ^^^^^^^^ help: try simplifying it as shown: `!x`
error: greater than checks against false are unnecessary
- --> $DIR/bool_comparison.rs:53:8
+ --> $DIR/bool_comparison.rs:54:8
|
LL | if false < x {
| ^^^^^^^^^ help: try simplifying it as shown: `x`
error: greater than checks against false are unnecessary
- --> $DIR/bool_comparison.rs:58:8
+ --> $DIR/bool_comparison.rs:59:8
|
LL | if x > false {
| ^^^^^^^^^ help: try simplifying it as shown: `x`
error: less than comparison against true can be replaced by a negation
- --> $DIR/bool_comparison.rs:63:8
+ --> $DIR/bool_comparison.rs:64:8
|
LL | if true > x {
| ^^^^^^^^ help: try simplifying it as shown: `!x`
error: order comparisons between booleans can be simplified
- --> $DIR/bool_comparison.rs:69:8
+ --> $DIR/bool_comparison.rs:70:8
|
LL | if x < y {
| ^^^^^ help: try simplifying it as shown: `!x & y`
error: order comparisons between booleans can be simplified
- --> $DIR/bool_comparison.rs:74:8
+ --> $DIR/bool_comparison.rs:75:8
|
LL | if x > y {
| ^^^^^ help: try simplifying it as shown: `x & !y`
error: this comparison might be written more concisely
- --> $DIR/bool_comparison.rs:122:8
+ --> $DIR/bool_comparison.rs:123:8
|
LL | if a == !b {};
| ^^^^^^^ help: try simplifying it as shown: `a != b`
error: this comparison might be written more concisely
- --> $DIR/bool_comparison.rs:123:8
+ --> $DIR/bool_comparison.rs:124:8
|
LL | if !a == b {};
| ^^^^^^^ help: try simplifying it as shown: `a != b`
error: this comparison might be written more concisely
- --> $DIR/bool_comparison.rs:127:8
+ --> $DIR/bool_comparison.rs:128:8
|
LL | if b == !a {};
| ^^^^^^^ help: try simplifying it as shown: `b != a`
error: this comparison might be written more concisely
- --> $DIR/bool_comparison.rs:128:8
+ --> $DIR/bool_comparison.rs:129:8
|
LL | if !b == a {};
| ^^^^^^^ help: try simplifying it as shown: `b != a`
error: equality checks against false can be replaced by a negation
- --> $DIR/bool_comparison.rs:152:8
+ --> $DIR/bool_comparison.rs:153:8
|
LL | if false == m!(func) {}
| ^^^^^^^^^^^^^^^^^ help: try simplifying it as shown: `!m!(func)`
error: equality checks against false can be replaced by a negation
- --> $DIR/bool_comparison.rs:153:8
+ --> $DIR/bool_comparison.rs:154:8
|
LL | if m!(func) == false {}
| ^^^^^^^^^^^^^^^^^ help: try simplifying it as shown: `!m!(func)`
error: equality checks against true are unnecessary
- --> $DIR/bool_comparison.rs:154:8
+ --> $DIR/bool_comparison.rs:155:8
|
LL | if true == m!(func) {}
| ^^^^^^^^^^^^^^^^ help: try simplifying it as shown: `m!(func)`
error: equality checks against true are unnecessary
- --> $DIR/bool_comparison.rs:155:8
+ --> $DIR/bool_comparison.rs:156:8
|
LL | if m!(func) == true {}
| ^^^^^^^^^^^^^^^^ help: try simplifying it as shown: `m!(func)`
diff --git a/src/tools/clippy/tests/ui/borrow_box.rs b/src/tools/clippy/tests/ui/borrow_box.rs
index 3b5b6bf4c..95b6b0f50 100644
--- a/src/tools/clippy/tests/ui/borrow_box.rs
+++ b/src/tools/clippy/tests/ui/borrow_box.rs
@@ -1,6 +1,10 @@
#![deny(clippy::borrowed_box)]
#![allow(dead_code, unused_variables)]
-#![allow(clippy::uninlined_format_args, clippy::disallowed_names)]
+#![allow(
+ clippy::uninlined_format_args,
+ clippy::disallowed_names,
+ clippy::needless_pass_by_ref_mut
+)]
use std::fmt::Display;
diff --git a/src/tools/clippy/tests/ui/borrow_box.stderr b/src/tools/clippy/tests/ui/borrow_box.stderr
index 99cb60a1e..90e752211 100644
--- a/src/tools/clippy/tests/ui/borrow_box.stderr
+++ b/src/tools/clippy/tests/ui/borrow_box.stderr
@@ -1,5 +1,5 @@
error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
- --> $DIR/borrow_box.rs:20:14
+ --> $DIR/borrow_box.rs:24:14
|
LL | let foo: &Box<bool>;
| ^^^^^^^^^^ help: try: `&bool`
@@ -11,55 +11,55 @@ LL | #![deny(clippy::borrowed_box)]
| ^^^^^^^^^^^^^^^^^^^^
error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
- --> $DIR/borrow_box.rs:24:10
+ --> $DIR/borrow_box.rs:28:10
|
LL | foo: &'a Box<bool>,
| ^^^^^^^^^^^^^ help: try: `&'a bool`
error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
- --> $DIR/borrow_box.rs:28:17
+ --> $DIR/borrow_box.rs:32:17
|
LL | fn test4(a: &Box<bool>);
| ^^^^^^^^^^ help: try: `&bool`
error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
- --> $DIR/borrow_box.rs:94:25
+ --> $DIR/borrow_box.rs:98:25
|
LL | pub fn test14(_display: &Box<dyn Display>) {}
| ^^^^^^^^^^^^^^^^^ help: try: `&dyn Display`
error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
- --> $DIR/borrow_box.rs:95:25
+ --> $DIR/borrow_box.rs:99:25
|
LL | pub fn test15(_display: &Box<dyn Display + Send>) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&(dyn Display + Send)`
error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
- --> $DIR/borrow_box.rs:96:29
+ --> $DIR/borrow_box.rs:100:29
|
LL | pub fn test16<'a>(_display: &'a Box<dyn Display + 'a>) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&'a (dyn Display + 'a)`
error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
- --> $DIR/borrow_box.rs:98:25
+ --> $DIR/borrow_box.rs:102:25
|
LL | pub fn test17(_display: &Box<impl Display>) {}
| ^^^^^^^^^^^^^^^^^^ help: try: `&impl Display`
error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
- --> $DIR/borrow_box.rs:99:25
+ --> $DIR/borrow_box.rs:103:25
|
LL | pub fn test18(_display: &Box<impl Display + Send>) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&(impl Display + Send)`
error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
- --> $DIR/borrow_box.rs:100:29
+ --> $DIR/borrow_box.rs:104:29
|
LL | pub fn test19<'a>(_display: &'a Box<impl Display + 'a>) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&'a (impl Display + 'a)`
error: you seem to be trying to use `&Box<T>`. Consider using just `&T`
- --> $DIR/borrow_box.rs:105:25
+ --> $DIR/borrow_box.rs:109:25
|
LL | pub fn test20(_display: &Box<(dyn Display + Send)>) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&(dyn Display + Send)`
diff --git a/src/tools/clippy/tests/ui/comparison_to_empty.fixed b/src/tools/clippy/tests/ui/comparison_to_empty.fixed
index c92dd509e..af219eed0 100644
--- a/src/tools/clippy/tests/ui/comparison_to_empty.fixed
+++ b/src/tools/clippy/tests/ui/comparison_to_empty.fixed
@@ -1,7 +1,8 @@
//@run-rustfix
#![warn(clippy::comparison_to_empty)]
-#![allow(clippy::useless_vec)]
+#![allow(clippy::borrow_deref_ref, clippy::needless_if, clippy::useless_vec)]
+#![feature(let_chains)]
fn main() {
// Disallow comparisons to empty
@@ -12,6 +13,11 @@ fn main() {
let v = vec![0];
let _ = v.is_empty();
let _ = !v.is_empty();
+ if (*v).is_empty() {}
+ let s = [0].as_slice();
+ if s.is_empty() {}
+ if s.is_empty() {}
+ if s.is_empty() && s.is_empty() {}
// Allow comparisons to non-empty
let s = String::new();
@@ -21,4 +27,8 @@ fn main() {
let v = vec![0];
let _ = v == [0];
let _ = v != [0];
+ if let [0] = &*v {}
+ let s = [0].as_slice();
+ if let [0] = s {}
+ if let [0] = &*s && s == [0] {}
}
diff --git a/src/tools/clippy/tests/ui/comparison_to_empty.rs b/src/tools/clippy/tests/ui/comparison_to_empty.rs
index b34897143..21e65184d 100644
--- a/src/tools/clippy/tests/ui/comparison_to_empty.rs
+++ b/src/tools/clippy/tests/ui/comparison_to_empty.rs
@@ -1,7 +1,8 @@
//@run-rustfix
#![warn(clippy::comparison_to_empty)]
-#![allow(clippy::useless_vec)]
+#![allow(clippy::borrow_deref_ref, clippy::needless_if, clippy::useless_vec)]
+#![feature(let_chains)]
fn main() {
// Disallow comparisons to empty
@@ -12,6 +13,11 @@ fn main() {
let v = vec![0];
let _ = v == [];
let _ = v != [];
+ if let [] = &*v {}
+ let s = [0].as_slice();
+ if let [] = s {}
+ if let [] = &*s {}
+ if let [] = &*s && s == [] {}
// Allow comparisons to non-empty
let s = String::new();
@@ -21,4 +27,8 @@ fn main() {
let v = vec![0];
let _ = v == [0];
let _ = v != [0];
+ if let [0] = &*v {}
+ let s = [0].as_slice();
+ if let [0] = s {}
+ if let [0] = &*s && s == [0] {}
}
diff --git a/src/tools/clippy/tests/ui/comparison_to_empty.stderr b/src/tools/clippy/tests/ui/comparison_to_empty.stderr
index cc09b17eb..f29782ed8 100644
--- a/src/tools/clippy/tests/ui/comparison_to_empty.stderr
+++ b/src/tools/clippy/tests/ui/comparison_to_empty.stderr
@@ -1,5 +1,5 @@
error: comparison to empty slice
- --> $DIR/comparison_to_empty.rs:9:13
+ --> $DIR/comparison_to_empty.rs:10:13
|
LL | let _ = s == "";
| ^^^^^^^ help: using `is_empty` is clearer and more explicit: `s.is_empty()`
@@ -7,22 +7,52 @@ LL | let _ = s == "";
= note: `-D clippy::comparison-to-empty` implied by `-D warnings`
error: comparison to empty slice
- --> $DIR/comparison_to_empty.rs:10:13
+ --> $DIR/comparison_to_empty.rs:11:13
|
LL | let _ = s != "";
| ^^^^^^^ help: using `!is_empty` is clearer and more explicit: `!s.is_empty()`
error: comparison to empty slice
- --> $DIR/comparison_to_empty.rs:13:13
+ --> $DIR/comparison_to_empty.rs:14:13
|
LL | let _ = v == [];
| ^^^^^^^ help: using `is_empty` is clearer and more explicit: `v.is_empty()`
error: comparison to empty slice
- --> $DIR/comparison_to_empty.rs:14:13
+ --> $DIR/comparison_to_empty.rs:15:13
|
LL | let _ = v != [];
| ^^^^^^^ help: using `!is_empty` is clearer and more explicit: `!v.is_empty()`
-error: aborting due to 4 previous errors
+error: comparison to empty slice using `if let`
+ --> $DIR/comparison_to_empty.rs:16:8
+ |
+LL | if let [] = &*v {}
+ | ^^^^^^^^^^^^ help: using `is_empty` is clearer and more explicit: `(*v).is_empty()`
+
+error: comparison to empty slice using `if let`
+ --> $DIR/comparison_to_empty.rs:18:8
+ |
+LL | if let [] = s {}
+ | ^^^^^^^^^^ help: using `is_empty` is clearer and more explicit: `s.is_empty()`
+
+error: comparison to empty slice using `if let`
+ --> $DIR/comparison_to_empty.rs:19:8
+ |
+LL | if let [] = &*s {}
+ | ^^^^^^^^^^^^ help: using `is_empty` is clearer and more explicit: `s.is_empty()`
+
+error: comparison to empty slice using `if let`
+ --> $DIR/comparison_to_empty.rs:20:8
+ |
+LL | if let [] = &*s && s == [] {}
+ | ^^^^^^^^^^^^ help: using `is_empty` is clearer and more explicit: `s.is_empty()`
+
+error: comparison to empty slice
+ --> $DIR/comparison_to_empty.rs:20:24
+ |
+LL | if let [] = &*s && s == [] {}
+ | ^^^^^^^ help: using `is_empty` is clearer and more explicit: `s.is_empty()`
+
+error: aborting due to 9 previous errors
diff --git a/src/tools/clippy/tests/ui/const_comparisons.rs b/src/tools/clippy/tests/ui/const_comparisons.rs
new file mode 100644
index 000000000..8e265c914
--- /dev/null
+++ b/src/tools/clippy/tests/ui/const_comparisons.rs
@@ -0,0 +1,93 @@
+#![allow(unused)]
+#![warn(clippy::impossible_comparisons)]
+#![warn(clippy::redundant_comparisons)]
+#![allow(clippy::no_effect)]
+#![allow(clippy::short_circuit_statement)]
+#![allow(clippy::manual_range_contains)]
+
+const STATUS_BAD_REQUEST: u16 = 400;
+const STATUS_SERVER_ERROR: u16 = 500;
+
+struct Status {
+ code: u16,
+}
+
+impl PartialEq<u16> for Status {
+ fn eq(&self, other: &u16) -> bool {
+ self.code == *other
+ }
+}
+
+impl PartialOrd<u16> for Status {
+ fn partial_cmp(&self, other: &u16) -> Option<std::cmp::Ordering> {
+ self.code.partial_cmp(other)
+ }
+}
+
+impl PartialEq<Status> for u16 {
+ fn eq(&self, other: &Status) -> bool {
+ *self == other.code
+ }
+}
+
+impl PartialOrd<Status> for u16 {
+ fn partial_cmp(&self, other: &Status) -> Option<std::cmp::Ordering> {
+ self.partial_cmp(&other.code)
+ }
+}
+
+fn main() {
+ let status_code = 500; // Value doesn't matter for the lint
+ let status = Status { code: status_code };
+
+ status_code >= 400 && status_code < 500; // Correct
+ status_code <= 400 && status_code > 500;
+ status_code > 500 && status_code < 400;
+ status_code < 500 && status_code > 500;
+
+ // More complex expressions
+ status_code < { 400 } && status_code > { 500 };
+ status_code < STATUS_BAD_REQUEST && status_code > STATUS_SERVER_ERROR;
+ status_code <= u16::MIN + 1 && status_code > STATUS_SERVER_ERROR;
+ status_code < STATUS_SERVER_ERROR && status_code > STATUS_SERVER_ERROR;
+
+ // Comparing two different types, via the `impl PartialOrd<u16> for Status`
+ status < { 400 } && status > { 500 };
+ status < STATUS_BAD_REQUEST && status > STATUS_SERVER_ERROR;
+ status <= u16::MIN + 1 && status > STATUS_SERVER_ERROR;
+ status < STATUS_SERVER_ERROR && status > STATUS_SERVER_ERROR;
+
+ // Yoda conditions
+ 500 <= status_code && 600 > status_code; // Correct
+ 500 <= status_code && status_code <= 600; // Correct
+ 500 >= status_code && 600 < status_code; // Incorrect
+ 500 >= status_code && status_code > 600; // Incorrect
+
+ // Yoda conditions, comparing two different types
+ 500 <= status && 600 > status; // Correct
+ 500 <= status && status <= 600; // Correct
+ 500 >= status && 600 < status; // Incorrect
+ 500 >= status && status > 600; // Incorrect
+
+ // Expressions where one of the sides has no effect
+ status_code < 200 && status_code <= 299;
+ status_code > 200 && status_code >= 299;
+
+ status_code >= 500 && status_code > 500; // Useless left
+ status_code > 500 && status_code >= 500; // Useless right
+ status_code <= 500 && status_code < 500; // Useless left
+ status_code < 500 && status_code <= 500; // Useless right
+
+ // Other types
+ let name = "Steve";
+ name < "Jennifer" && name > "Shannon";
+
+ let numbers = [1, 2];
+ numbers < [3, 4] && numbers > [5, 6];
+
+ let letter = 'a';
+ letter < 'b' && letter > 'c';
+
+ let area = 42.0;
+ area < std::f32::consts::E && area > std::f32::consts::PI;
+}
diff --git a/src/tools/clippy/tests/ui/const_comparisons.stderr b/src/tools/clippy/tests/ui/const_comparisons.stderr
new file mode 100644
index 000000000..90e6db647
--- /dev/null
+++ b/src/tools/clippy/tests/ui/const_comparisons.stderr
@@ -0,0 +1,228 @@
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:44:5
+ |
+LL | status_code <= 400 && status_code > 500;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `400` < `500`, the expression evaluates to false for any value of `status_code`
+ = note: `-D clippy::impossible-comparisons` implied by `-D warnings`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:45:5
+ |
+LL | status_code > 500 && status_code < 400;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `500` > `400`, the expression evaluates to false for any value of `status_code`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:46:5
+ |
+LL | status_code < 500 && status_code > 500;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `status_code` cannot simultaneously be greater than and less than `500`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:49:5
+ |
+LL | status_code < { 400 } && status_code > { 500 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `{ 400 }` < `{ 500 }`, the expression evaluates to false for any value of `status_code`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:50:5
+ |
+LL | status_code < STATUS_BAD_REQUEST && status_code > STATUS_SERVER_ERROR;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `STATUS_BAD_REQUEST` < `STATUS_SERVER_ERROR`, the expression evaluates to false for any value of `status_code`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:51:5
+ |
+LL | status_code <= u16::MIN + 1 && status_code > STATUS_SERVER_ERROR;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `u16::MIN + 1` < `STATUS_SERVER_ERROR`, the expression evaluates to false for any value of `status_code`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:52:5
+ |
+LL | status_code < STATUS_SERVER_ERROR && status_code > STATUS_SERVER_ERROR;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `status_code` cannot simultaneously be greater than and less than `STATUS_SERVER_ERROR`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:55:5
+ |
+LL | status < { 400 } && status > { 500 };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `{ 400 }` < `{ 500 }`, the expression evaluates to false for any value of `status`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:56:5
+ |
+LL | status < STATUS_BAD_REQUEST && status > STATUS_SERVER_ERROR;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `STATUS_BAD_REQUEST` < `STATUS_SERVER_ERROR`, the expression evaluates to false for any value of `status`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:57:5
+ |
+LL | status <= u16::MIN + 1 && status > STATUS_SERVER_ERROR;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `u16::MIN + 1` < `STATUS_SERVER_ERROR`, the expression evaluates to false for any value of `status`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:58:5
+ |
+LL | status < STATUS_SERVER_ERROR && status > STATUS_SERVER_ERROR;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `status` cannot simultaneously be greater than and less than `STATUS_SERVER_ERROR`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:63:5
+ |
+LL | 500 >= status_code && 600 < status_code; // Incorrect
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `500` < `600`, the expression evaluates to false for any value of `status_code`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:64:5
+ |
+LL | 500 >= status_code && status_code > 600; // Incorrect
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `500` < `600`, the expression evaluates to false for any value of `status_code`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:69:5
+ |
+LL | 500 >= status && 600 < status; // Incorrect
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `500` < `600`, the expression evaluates to false for any value of `status`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:70:5
+ |
+LL | 500 >= status && status > 600; // Incorrect
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `500` < `600`, the expression evaluates to false for any value of `status`
+
+error: right-hand side of `&&` operator has no effect
+ --> $DIR/const_comparisons.rs:73:5
+ |
+LL | status_code < 200 && status_code <= 299;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: `if `status_code < 200` evaluates to true, status_code <= 299` will always evaluate to true as well
+ --> $DIR/const_comparisons.rs:73:23
+ |
+LL | status_code < 200 && status_code <= 299;
+ | ^^^^^^^^^^^^^^^^^^^^^
+ = note: `-D clippy::redundant-comparisons` implied by `-D warnings`
+
+error: left-hand side of `&&` operator has no effect
+ --> $DIR/const_comparisons.rs:74:5
+ |
+LL | status_code > 200 && status_code >= 299;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: `if `status_code >= 299` evaluates to true, status_code > 200` will always evaluate to true as well
+ --> $DIR/const_comparisons.rs:74:5
+ |
+LL | status_code > 200 && status_code >= 299;
+ | ^^^^^^^^^^^^^^^^^^^^^
+
+error: left-hand side of `&&` operator has no effect
+ --> $DIR/const_comparisons.rs:76:5
+ |
+LL | status_code >= 500 && status_code > 500; // Useless left
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: `if `status_code > 500` evaluates to true, status_code >= 500` will always evaluate to true as well
+ --> $DIR/const_comparisons.rs:76:5
+ |
+LL | status_code >= 500 && status_code > 500; // Useless left
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: right-hand side of `&&` operator has no effect
+ --> $DIR/const_comparisons.rs:77:5
+ |
+LL | status_code > 500 && status_code >= 500; // Useless right
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: `if `status_code > 500` evaluates to true, status_code >= 500` will always evaluate to true as well
+ --> $DIR/const_comparisons.rs:77:23
+ |
+LL | status_code > 500 && status_code >= 500; // Useless right
+ | ^^^^^^^^^^^^^^^^^^^^^
+
+error: left-hand side of `&&` operator has no effect
+ --> $DIR/const_comparisons.rs:78:5
+ |
+LL | status_code <= 500 && status_code < 500; // Useless left
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: `if `status_code < 500` evaluates to true, status_code <= 500` will always evaluate to true as well
+ --> $DIR/const_comparisons.rs:78:5
+ |
+LL | status_code <= 500 && status_code < 500; // Useless left
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: right-hand side of `&&` operator has no effect
+ --> $DIR/const_comparisons.rs:79:5
+ |
+LL | status_code < 500 && status_code <= 500; // Useless right
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: `if `status_code < 500` evaluates to true, status_code <= 500` will always evaluate to true as well
+ --> $DIR/const_comparisons.rs:79:23
+ |
+LL | status_code < 500 && status_code <= 500; // Useless right
+ | ^^^^^^^^^^^^^^^^^^^^^
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:83:5
+ |
+LL | name < "Jennifer" && name > "Shannon";
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `"Jennifer"` < `"Shannon"`, the expression evaluates to false for any value of `name`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:86:5
+ |
+LL | numbers < [3, 4] && numbers > [5, 6];
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `[3, 4]` < `[5, 6]`, the expression evaluates to false for any value of `numbers`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:89:5
+ |
+LL | letter < 'b' && letter > 'c';
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `'b'` < `'c'`, the expression evaluates to false for any value of `letter`
+
+error: boolean expression will never evaluate to 'true'
+ --> $DIR/const_comparisons.rs:92:5
+ |
+LL | area < std::f32::consts::E && area > std::f32::consts::PI;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: since `std::f32::consts::E` < `std::f32::consts::PI`, the expression evaluates to false for any value of `area`
+
+error: aborting due to 25 previous errors
+
diff --git a/src/tools/clippy/tests/ui/crashes/ice-6256.rs b/src/tools/clippy/tests/ui/crashes/ice-6256.rs
index 1d336b3cd..bb488c2dc 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-6256.rs
+++ b/src/tools/clippy/tests/ui/crashes/ice-6256.rs
@@ -1,5 +1,5 @@
// originally from rustc ./tests/ui/regions/issue-78262.rs
-// ICE: to get the signature of a closure, use substs.as_closure().sig() not fn_sig()
+// ICE: to get the signature of a closure, use args.as_closure().sig() not fn_sig()
#![allow(clippy::upper_case_acronyms)]
trait TT {}
diff --git a/src/tools/clippy/tests/ui/crashes/ice-7169.stderr b/src/tools/clippy/tests/ui/crashes/ice-7169.stderr
index 84e0af3f0..0cd028516 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-7169.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-7169.stderr
@@ -2,7 +2,7 @@ error: redundant pattern matching, consider using `is_ok()`
--> $DIR/ice-7169.rs:10:12
|
LL | if let Ok(_) = Ok::<_, ()>(A::<String>::default()) {}
- | -------^^^^^-------------------------------------- help: try this: `if Ok::<_, ()>(A::<String>::default()).is_ok()`
+ | -------^^^^^-------------------------------------- help: try: `if Ok::<_, ()>(A::<String>::default()).is_ok()`
|
= note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
diff --git a/src/tools/clippy/tests/ui/crashes/ice-8250.stderr b/src/tools/clippy/tests/ui/crashes/ice-8250.stderr
index 8ed8f3b3a..e6f3644ef 100644
--- a/src/tools/clippy/tests/ui/crashes/ice-8250.stderr
+++ b/src/tools/clippy/tests/ui/crashes/ice-8250.stderr
@@ -2,7 +2,7 @@ error: unnecessary use of `splitn`
--> $DIR/ice-8250.rs:2:13
|
LL | let _ = s[1..].splitn(2, '.').next()?;
- | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `s[1..].split('.')`
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `s[1..].split('.')`
|
= note: `-D clippy::needless-splitn` implied by `-D warnings`
diff --git a/src/tools/clippy/tests/ui/default_trait_access.fixed b/src/tools/clippy/tests/ui/default_trait_access.fixed
index 14eb6d572..6e541473c 100644
--- a/src/tools/clippy/tests/ui/default_trait_access.fixed
+++ b/src/tools/clippy/tests/ui/default_trait_access.fixed
@@ -7,9 +7,8 @@
extern crate proc_macros;
use proc_macros::with_span;
-use std::default;
use std::default::Default as D2;
-use std::string;
+use std::{default, string};
fn main() {
let s1: String = String::default();
diff --git a/src/tools/clippy/tests/ui/default_trait_access.rs b/src/tools/clippy/tests/ui/default_trait_access.rs
index aa2ced0a7..2ffeb32fb 100644
--- a/src/tools/clippy/tests/ui/default_trait_access.rs
+++ b/src/tools/clippy/tests/ui/default_trait_access.rs
@@ -7,9 +7,8 @@
extern crate proc_macros;
use proc_macros::with_span;
-use std::default;
use std::default::Default as D2;
-use std::string;
+use std::{default, string};
fn main() {
let s1: String = Default::default();
diff --git a/src/tools/clippy/tests/ui/default_trait_access.stderr b/src/tools/clippy/tests/ui/default_trait_access.stderr
index e4f73c08d..103fccf6a 100644
--- a/src/tools/clippy/tests/ui/default_trait_access.stderr
+++ b/src/tools/clippy/tests/ui/default_trait_access.stderr
@@ -1,5 +1,5 @@
error: calling `String::default()` is more clear than this expression
- --> $DIR/default_trait_access.rs:15:22
+ --> $DIR/default_trait_access.rs:14:22
|
LL | let s1: String = Default::default();
| ^^^^^^^^^^^^^^^^^^ help: try: `String::default()`
@@ -11,43 +11,43 @@ LL | #![deny(clippy::default_trait_access)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: calling `String::default()` is more clear than this expression
- --> $DIR/default_trait_access.rs:19:22
+ --> $DIR/default_trait_access.rs:18:22
|
LL | let s3: String = D2::default();
| ^^^^^^^^^^^^^ help: try: `String::default()`
error: calling `String::default()` is more clear than this expression
- --> $DIR/default_trait_access.rs:21:22
+ --> $DIR/default_trait_access.rs:20:22
|
LL | let s4: String = std::default::Default::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `String::default()`
error: calling `String::default()` is more clear than this expression
- --> $DIR/default_trait_access.rs:25:22
+ --> $DIR/default_trait_access.rs:24:22
|
LL | let s6: String = default::Default::default();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `String::default()`
error: calling `GenericDerivedDefault::default()` is more clear than this expression
- --> $DIR/default_trait_access.rs:35:46
+ --> $DIR/default_trait_access.rs:34:46
|
LL | let s11: GenericDerivedDefault<String> = Default::default();
| ^^^^^^^^^^^^^^^^^^ help: try: `GenericDerivedDefault::default()`
error: calling `TupleDerivedDefault::default()` is more clear than this expression
- --> $DIR/default_trait_access.rs:41:36
+ --> $DIR/default_trait_access.rs:40:36
|
LL | let s14: TupleDerivedDefault = Default::default();
| ^^^^^^^^^^^^^^^^^^ help: try: `TupleDerivedDefault::default()`
error: calling `ArrayDerivedDefault::default()` is more clear than this expression
- --> $DIR/default_trait_access.rs:43:36
+ --> $DIR/default_trait_access.rs:42:36
|
LL | let s15: ArrayDerivedDefault = Default::default();
| ^^^^^^^^^^^^^^^^^^ help: try: `ArrayDerivedDefault::default()`
error: calling `TupleStructDerivedDefault::default()` is more clear than this expression
- --> $DIR/default_trait_access.rs:47:42
+ --> $DIR/default_trait_access.rs:46:42
|
LL | let s17: TupleStructDerivedDefault = Default::default();
| ^^^^^^^^^^^^^^^^^^ help: try: `TupleStructDerivedDefault::default()`
diff --git a/src/tools/clippy/tests/ui/deref_addrof.stderr b/src/tools/clippy/tests/ui/deref_addrof.stderr
index e0287522f..9dd1e246b 100644
--- a/src/tools/clippy/tests/ui/deref_addrof.stderr
+++ b/src/tools/clippy/tests/ui/deref_addrof.stderr
@@ -2,7 +2,7 @@ error: immediately dereferencing a reference
--> $DIR/deref_addrof.rs:24:13
|
LL | let b = *&a;
- | ^^^ help: try this: `a`
+ | ^^^ help: try: `a`
|
= note: `-D clippy::deref-addrof` implied by `-D warnings`
@@ -10,49 +10,49 @@ error: immediately dereferencing a reference
--> $DIR/deref_addrof.rs:26:13
|
LL | let b = *&get_number();
- | ^^^^^^^^^^^^^^ help: try this: `get_number()`
+ | ^^^^^^^^^^^^^^ help: try: `get_number()`
error: immediately dereferencing a reference
--> $DIR/deref_addrof.rs:31:13
|
LL | let b = *&bytes[1..2][0];
- | ^^^^^^^^^^^^^^^^ help: try this: `bytes[1..2][0]`
+ | ^^^^^^^^^^^^^^^^ help: try: `bytes[1..2][0]`
error: immediately dereferencing a reference
--> $DIR/deref_addrof.rs:35:13
|
LL | let b = *&(a);
- | ^^^^^ help: try this: `(a)`
+ | ^^^^^ help: try: `(a)`
error: immediately dereferencing a reference
--> $DIR/deref_addrof.rs:37:13
|
LL | let b = *(&a);
- | ^^^^^ help: try this: `a`
+ | ^^^^^ help: try: `a`
error: immediately dereferencing a reference
--> $DIR/deref_addrof.rs:40:13
|
LL | let b = *((&a));
- | ^^^^^^^ help: try this: `a`
+ | ^^^^^^^ help: try: `a`
error: immediately dereferencing a reference
--> $DIR/deref_addrof.rs:42:13
|
LL | let b = *&&a;
- | ^^^^ help: try this: `&a`
+ | ^^^^ help: try: `&a`
error: immediately dereferencing a reference
--> $DIR/deref_addrof.rs:44:14
|
LL | let b = **&aref;
- | ^^^^^^ help: try this: `aref`
+ | ^^^^^^ help: try: `aref`
error: immediately dereferencing a reference
--> $DIR/deref_addrof.rs:54:17
|
LL | inline!(*& $(@expr self))
- | ^^^^^^^^^^^^^^^^ help: try this: `$(@expr self)`
+ | ^^^^^^^^^^^^^^^^ help: try: `$(@expr self)`
|
= note: this error originates in the macro `__inline_mac_impl` (in Nightly builds, run with -Z macro-backtrace for more info)
@@ -60,7 +60,7 @@ error: immediately dereferencing a reference
--> $DIR/deref_addrof.rs:58:17
|
LL | inline!(*&mut $(@expr self))
- | ^^^^^^^^^^^^^^^^^^^ help: try this: `$(@expr self)`
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `$(@expr self)`
|
= note: this error originates in the macro `__inline_mac_impl` (in Nightly builds, run with -Z macro-backtrace for more info)
diff --git a/src/tools/clippy/tests/ui/deref_addrof_double_trigger.stderr b/src/tools/clippy/tests/ui/deref_addrof_double_trigger.stderr
index 2c55a4ed6..6fa5069b6 100644
--- a/src/tools/clippy/tests/ui/deref_addrof_double_trigger.stderr
+++ b/src/tools/clippy/tests/ui/deref_addrof_double_trigger.stderr
@@ -2,7 +2,7 @@ error: immediately dereferencing a reference
--> $DIR/deref_addrof_double_trigger.rs:10:14
|
LL | let b = **&&a;
- | ^^^^ help: try this: `&a`
+ | ^^^^ help: try: `&a`
|
= note: `-D clippy::deref-addrof` implied by `-D warnings`
@@ -10,13 +10,13 @@ error: immediately dereferencing a reference
--> $DIR/deref_addrof_double_trigger.rs:14:17
|
LL | let y = *&mut x;
- | ^^^^^^^ help: try this: `x`
+ | ^^^^^^^ help: try: `x`
error: immediately dereferencing a reference
--> $DIR/deref_addrof_double_trigger.rs:21:18
|
LL | let y = **&mut &mut x;
- | ^^^^^^^^^^^^ help: try this: `&mut x`
+ | ^^^^^^^^^^^^ help: try: `&mut x`
error: aborting due to 3 previous errors
diff --git a/src/tools/clippy/tests/ui/derive.rs b/src/tools/clippy/tests/ui/derive.rs
index e01079bc9..c76711312 100644
--- a/src/tools/clippy/tests/ui/derive.rs
+++ b/src/tools/clippy/tests/ui/derive.rs
@@ -1,4 +1,8 @@
-#![allow(clippy::incorrect_clone_impl_on_copy_type, dead_code)]
+#![allow(
+ clippy::incorrect_clone_impl_on_copy_type,
+ clippy::incorrect_partial_ord_impl_on_ord_type,
+ dead_code
+)]
#![warn(clippy::expl_impl_clone_on_copy)]
diff --git a/src/tools/clippy/tests/ui/derive.stderr b/src/tools/clippy/tests/ui/derive.stderr
index e1fbb8dcd..5d7ed0918 100644
--- a/src/tools/clippy/tests/ui/derive.stderr
+++ b/src/tools/clippy/tests/ui/derive.stderr
@@ -1,5 +1,5 @@
error: you are implementing `Clone` explicitly on a `Copy` type
- --> $DIR/derive.rs:8:1
+ --> $DIR/derive.rs:12:1
|
LL | / impl Clone for Qux {
LL | | fn clone(&self) -> Self {
@@ -9,7 +9,7 @@ LL | | }
| |_^
|
note: consider deriving `Clone` or removing `Copy`
- --> $DIR/derive.rs:8:1
+ --> $DIR/derive.rs:12:1
|
LL | / impl Clone for Qux {
LL | | fn clone(&self) -> Self {
@@ -20,7 +20,7 @@ LL | | }
= note: `-D clippy::expl-impl-clone-on-copy` implied by `-D warnings`
error: you are implementing `Clone` explicitly on a `Copy` type
- --> $DIR/derive.rs:32:1
+ --> $DIR/derive.rs:36:1
|
LL | / impl<'a> Clone for Lt<'a> {
LL | | fn clone(&self) -> Self {
@@ -30,7 +30,7 @@ LL | | }
| |_^
|
note: consider deriving `Clone` or removing `Copy`
- --> $DIR/derive.rs:32:1
+ --> $DIR/derive.rs:36:1
|
LL | / impl<'a> Clone for Lt<'a> {
LL | | fn clone(&self) -> Self {
@@ -40,7 +40,7 @@ LL | | }
| |_^
error: you are implementing `Clone` explicitly on a `Copy` type
- --> $DIR/derive.rs:43:1
+ --> $DIR/derive.rs:47:1
|
LL | / impl Clone for BigArray {
LL | | fn clone(&self) -> Self {
@@ -50,7 +50,7 @@ LL | | }
| |_^
|
note: consider deriving `Clone` or removing `Copy`
- --> $DIR/derive.rs:43:1
+ --> $DIR/derive.rs:47:1
|
LL | / impl Clone for BigArray {
LL | | fn clone(&self) -> Self {
@@ -60,7 +60,7 @@ LL | | }
| |_^
error: you are implementing `Clone` explicitly on a `Copy` type
- --> $DIR/derive.rs:54:1
+ --> $DIR/derive.rs:58:1
|
LL | / impl Clone for FnPtr {
LL | | fn clone(&self) -> Self {
@@ -70,7 +70,7 @@ LL | | }
| |_^
|
note: consider deriving `Clone` or removing `Copy`
- --> $DIR/derive.rs:54:1
+ --> $DIR/derive.rs:58:1
|
LL | / impl Clone for FnPtr {
LL | | fn clone(&self) -> Self {
@@ -80,7 +80,7 @@ LL | | }
| |_^
error: you are implementing `Clone` explicitly on a `Copy` type
- --> $DIR/derive.rs:74:1
+ --> $DIR/derive.rs:78:1
|
LL | / impl<T: Clone> Clone for Generic2<T> {
LL | | fn clone(&self) -> Self {
@@ -90,7 +90,7 @@ LL | | }
| |_^
|
note: consider deriving `Clone` or removing `Copy`
- --> $DIR/derive.rs:74:1
+ --> $DIR/derive.rs:78:1
|
LL | / impl<T: Clone> Clone for Generic2<T> {
LL | | fn clone(&self) -> Self {
diff --git a/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.rs b/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.rs
index 6f12d36d7..1fb3d51c4 100644
--- a/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.rs
+++ b/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.rs
@@ -1,5 +1,6 @@
#![warn(clippy::derive_ord_xor_partial_ord)]
#![allow(clippy::unnecessary_wraps)]
+#![allow(clippy::incorrect_partial_ord_impl_on_ord_type)]
use std::cmp::Ordering;
diff --git a/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.stderr b/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.stderr
index 58efbb854..bd1488348 100644
--- a/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.stderr
+++ b/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.stderr
@@ -1,11 +1,11 @@
error: you are deriving `Ord` but have implemented `PartialOrd` explicitly
- --> $DIR/derive_ord_xor_partial_ord.rs:21:10
+ --> $DIR/derive_ord_xor_partial_ord.rs:22:10
|
LL | #[derive(Ord, PartialEq, Eq)]
| ^^^
|
note: `PartialOrd` implemented here
- --> $DIR/derive_ord_xor_partial_ord.rs:24:1
+ --> $DIR/derive_ord_xor_partial_ord.rs:25:1
|
LL | impl PartialOrd for DeriveOrd {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -13,20 +13,20 @@ LL | impl PartialOrd for DeriveOrd {
= note: this error originates in the derive macro `Ord` (in Nightly builds, run with -Z macro-backtrace for more info)
error: you are deriving `Ord` but have implemented `PartialOrd` explicitly
- --> $DIR/derive_ord_xor_partial_ord.rs:30:10
+ --> $DIR/derive_ord_xor_partial_ord.rs:31:10
|
LL | #[derive(Ord, PartialEq, Eq)]
| ^^^
|
note: `PartialOrd` implemented here
- --> $DIR/derive_ord_xor_partial_ord.rs:33:1
+ --> $DIR/derive_ord_xor_partial_ord.rs:34:1
|
LL | impl PartialOrd<DeriveOrdWithExplicitTypeVariable> for DeriveOrdWithExplicitTypeVariable {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: this error originates in the derive macro `Ord` (in Nightly builds, run with -Z macro-backtrace for more info)
error: you are implementing `Ord` explicitly but have derived `PartialOrd`
- --> $DIR/derive_ord_xor_partial_ord.rs:42:1
+ --> $DIR/derive_ord_xor_partial_ord.rs:43:1
|
LL | / impl std::cmp::Ord for DerivePartialOrd {
LL | | fn cmp(&self, other: &Self) -> Ordering {
@@ -36,14 +36,14 @@ LL | | }
| |_^
|
note: `PartialOrd` implemented here
- --> $DIR/derive_ord_xor_partial_ord.rs:39:10
+ --> $DIR/derive_ord_xor_partial_ord.rs:40:10
|
LL | #[derive(PartialOrd, PartialEq, Eq)]
| ^^^^^^^^^^
= note: this error originates in the derive macro `PartialOrd` (in Nightly builds, run with -Z macro-backtrace for more info)
error: you are implementing `Ord` explicitly but have derived `PartialOrd`
- --> $DIR/derive_ord_xor_partial_ord.rs:62:5
+ --> $DIR/derive_ord_xor_partial_ord.rs:63:5
|
LL | / impl Ord for DerivePartialOrdInUseOrd {
LL | | fn cmp(&self, other: &Self) -> Ordering {
@@ -53,7 +53,7 @@ LL | | }
| |_____^
|
note: `PartialOrd` implemented here
- --> $DIR/derive_ord_xor_partial_ord.rs:59:14
+ --> $DIR/derive_ord_xor_partial_ord.rs:60:14
|
LL | #[derive(PartialOrd, PartialEq, Eq)]
| ^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/entry.stderr b/src/tools/clippy/tests/ui/entry.stderr
index 2c4c49d25..e8a003e9c 100644
--- a/src/tools/clippy/tests/ui/entry.stderr
+++ b/src/tools/clippy/tests/ui/entry.stderr
@@ -4,7 +4,7 @@ error: usage of `contains_key` followed by `insert` on a `HashMap`
LL | / if !m.contains_key(&k) {
LL | | m.insert(k, v);
LL | | }
- | |_____^ help: try this: `m.entry(k).or_insert(v);`
+ | |_____^ help: try: `m.entry(k).or_insert(v);`
|
= note: `-D clippy::map-entry` implied by `-D warnings`
@@ -20,7 +20,7 @@ LL | | }
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ m.entry(k).or_insert_with(|| {
LL + if true {
@@ -43,7 +43,7 @@ LL | | };
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ m.entry(k).or_insert_with(|| {
LL + if true {
@@ -66,7 +66,7 @@ LL | | }
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let std::collections::hash_map::Entry::Vacant(e) = m.entry(k) {
LL + if true {
@@ -87,7 +87,7 @@ LL | | m.insert(k, v);
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ m.entry(k).or_insert_with(|| {
LL + foo();
@@ -107,7 +107,7 @@ LL | | };
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ m.entry(k).or_insert_with(|| {
LL + match 0 {
@@ -133,7 +133,7 @@ LL | | };
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let std::collections::hash_map::Entry::Vacant(e) = m.entry(k) {
LL + match 0 {
@@ -157,7 +157,7 @@ LL | | }
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ m.entry(k).or_insert_with(|| {
LL + foo();
@@ -192,7 +192,7 @@ error: usage of `contains_key` followed by `insert` on a `HashMap`
LL | / if !m.contains_key(&m!(k)) {
LL | | m.insert(m!(k), m!(v));
LL | | }
- | |_____^ help: try this: `m.entry(m!(k)).or_insert_with(|| m!(v));`
+ | |_____^ help: try: `m.entry(m!(k)).or_insert_with(|| m!(v));`
error: usage of `contains_key` followed by `insert` on a `HashMap`
--> $DIR/entry.rs:152:5
@@ -204,7 +204,7 @@ LL | | m.insert(k, v);
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ m.entry(k).or_insert_with(|| {
LL + let x = (String::new(), String::new());
diff --git a/src/tools/clippy/tests/ui/entry_btree.stderr b/src/tools/clippy/tests/ui/entry_btree.stderr
index 5c6fcdf1a..8f41581d6 100644
--- a/src/tools/clippy/tests/ui/entry_btree.stderr
+++ b/src/tools/clippy/tests/ui/entry_btree.stderr
@@ -8,7 +8,7 @@ LL | | }
| |_____^
|
= note: `-D clippy::map-entry` implied by `-D warnings`
-help: try this
+help: try
|
LL ~ if let std::collections::btree_map::Entry::Vacant(e) = m.entry(k) {
LL + e.insert(v);
diff --git a/src/tools/clippy/tests/ui/entry_with_else.stderr b/src/tools/clippy/tests/ui/entry_with_else.stderr
index e0f6671b4..0d0eb9649 100644
--- a/src/tools/clippy/tests/ui/entry_with_else.stderr
+++ b/src/tools/clippy/tests/ui/entry_with_else.stderr
@@ -9,7 +9,7 @@ LL | | }
| |_____^
|
= note: `-D clippy::map-entry` implied by `-D warnings`
-help: try this
+help: try
|
LL ~ match m.entry(k) {
LL + std::collections::hash_map::Entry::Vacant(e) => {
@@ -31,7 +31,7 @@ LL | | m.insert(k, v2);
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ match m.entry(k) {
LL + std::collections::hash_map::Entry::Occupied(mut e) => {
@@ -53,7 +53,7 @@ LL | | foo();
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let std::collections::hash_map::Entry::Vacant(e) = m.entry(k) {
LL + e.insert(v);
@@ -72,7 +72,7 @@ LL | | m.insert(k, v);
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let std::collections::hash_map::Entry::Occupied(mut e) = m.entry(k) {
LL + e.insert(v);
@@ -91,7 +91,7 @@ LL | | m.insert(k, v2);
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ match m.entry(k) {
LL + std::collections::hash_map::Entry::Vacant(e) => {
@@ -113,7 +113,7 @@ LL | | m.insert(k, v)
LL | | };
| |_____^
|
-help: try this
+help: try
|
LL ~ match m.entry(k) {
LL + std::collections::hash_map::Entry::Occupied(mut e) => {
@@ -137,7 +137,7 @@ LL | | None
LL | | };
| |_____^
|
-help: try this
+help: try
|
LL ~ if let std::collections::hash_map::Entry::Occupied(mut e) = m.entry(k) {
LL + foo();
diff --git a/src/tools/clippy/tests/ui/error_impl_error.rs b/src/tools/clippy/tests/ui/error_impl_error.rs
new file mode 100644
index 000000000..40ce4181b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/error_impl_error.rs
@@ -0,0 +1,90 @@
+#![allow(unused)]
+#![warn(clippy::error_impl_error)]
+#![no_main]
+
+pub mod a {
+ #[derive(Debug)]
+ pub struct Error;
+
+ impl std::fmt::Display for Error {
+ fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ todo!()
+ }
+ }
+
+ impl std::error::Error for Error {}
+}
+
+mod b {
+ #[derive(Debug)]
+ pub(super) enum Error {}
+
+ impl std::fmt::Display for Error {
+ fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ todo!()
+ }
+ }
+
+ impl std::error::Error for Error {}
+}
+
+pub mod c {
+ pub union Error {
+ a: u32,
+ b: u32,
+ }
+
+ impl std::fmt::Debug for Error {
+ fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ todo!()
+ }
+ }
+
+ impl std::fmt::Display for Error {
+ fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ todo!()
+ }
+ }
+
+ impl std::error::Error for Error {}
+}
+
+pub mod d {
+ pub type Error = std::fmt::Error;
+}
+
+mod e {
+ #[derive(Debug)]
+ pub(super) struct MyError;
+
+ impl std::fmt::Display for MyError {
+ fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ todo!()
+ }
+ }
+
+ impl std::error::Error for MyError {}
+}
+
+pub mod f {
+ pub type MyError = std::fmt::Error;
+}
+
+// Do not lint module-private types
+
+mod g {
+ #[derive(Debug)]
+ enum Error {}
+
+ impl std::fmt::Display for Error {
+ fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ todo!()
+ }
+ }
+
+ impl std::error::Error for Error {}
+}
+
+mod h {
+ type Error = std::fmt::Error;
+}
diff --git a/src/tools/clippy/tests/ui/error_impl_error.stderr b/src/tools/clippy/tests/ui/error_impl_error.stderr
new file mode 100644
index 000000000..f3e04b641
--- /dev/null
+++ b/src/tools/clippy/tests/ui/error_impl_error.stderr
@@ -0,0 +1,45 @@
+error: exported type named `Error` that implements `Error`
+ --> $DIR/error_impl_error.rs:7:16
+ |
+LL | pub struct Error;
+ | ^^^^^
+ |
+note: `Error` was implemented here
+ --> $DIR/error_impl_error.rs:15:5
+ |
+LL | impl std::error::Error for Error {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = note: `-D clippy::error-impl-error` implied by `-D warnings`
+
+error: exported type named `Error` that implements `Error`
+ --> $DIR/error_impl_error.rs:20:21
+ |
+LL | pub(super) enum Error {}
+ | ^^^^^
+ |
+note: `Error` was implemented here
+ --> $DIR/error_impl_error.rs:28:5
+ |
+LL | impl std::error::Error for Error {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: exported type named `Error` that implements `Error`
+ --> $DIR/error_impl_error.rs:32:15
+ |
+LL | pub union Error {
+ | ^^^^^
+ |
+note: `Error` was implemented here
+ --> $DIR/error_impl_error.rs:49:5
+ |
+LL | impl std::error::Error for Error {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: exported type alias named `Error` that implements `Error`
+ --> $DIR/error_impl_error.rs:53:14
+ |
+LL | pub type Error = std::fmt::Error;
+ | ^^^^^
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/eta.fixed b/src/tools/clippy/tests/ui/eta.fixed
index bf44bcb56..ddabe7616 100644
--- a/src/tools/clippy/tests/ui/eta.fixed
+++ b/src/tools/clippy/tests/ui/eta.fixed
@@ -331,7 +331,7 @@ impl dyn TestTrait + '_ {
}
// https://github.com/rust-lang/rust-clippy/issues/7746
-fn angle_brackets_and_substs() {
+fn angle_brackets_and_args() {
let array_opt: Option<&[u8; 3]> = Some(&[4, 8, 7]);
array_opt.map(<[u8; 3]>::as_slice);
@@ -345,3 +345,58 @@ fn angle_brackets_and_substs() {
let dyn_opt: Option<&dyn TestTrait> = Some(&test_struct);
dyn_opt.map(<dyn TestTrait>::method_on_dyn);
}
+
+fn _late_bound_to_early_bound_regions() {
+ struct Foo<'a>(&'a u32);
+ impl<'a> Foo<'a> {
+ fn f(x: &'a u32) -> Self {
+ Foo(x)
+ }
+ }
+ fn f(f: impl for<'a> Fn(&'a u32) -> Foo<'a>) -> Foo<'static> {
+ f(&0)
+ }
+
+ let _ = f(|x| Foo::f(x));
+
+ struct Bar;
+ impl<'a> From<&'a u32> for Bar {
+ fn from(x: &'a u32) -> Bar {
+ Bar
+ }
+ }
+ fn f2(f: impl for<'a> Fn(&'a u32) -> Bar) -> Bar {
+ f(&0)
+ }
+
+ let _ = f2(|x| <Bar>::from(x));
+
+ struct Baz<'a>(&'a u32);
+ fn f3(f: impl Fn(&u32) -> Baz<'_>) -> Baz<'static> {
+ f(&0)
+ }
+
+ let _ = f3(|x| Baz(x));
+}
+
+fn _mixed_late_bound_and_early_bound_regions() {
+ fn f<T>(t: T, f: impl Fn(T, &u32) -> u32) -> u32 {
+ f(t, &0)
+ }
+ fn f2<'a, T: 'a>(_: &'a T, y: &u32) -> u32 {
+ *y
+ }
+ let _ = f(&0, f2);
+}
+
+fn _closure_with_types() {
+ fn f<T>(x: T) -> T {
+ x
+ }
+ fn f2<T: Default>(f: impl Fn(T) -> T) -> T {
+ f(T::default())
+ }
+
+ let _ = f2(|x: u32| f(x));
+ let _ = f2(|x| -> u32 { f(x) });
+}
diff --git a/src/tools/clippy/tests/ui/eta.rs b/src/tools/clippy/tests/ui/eta.rs
index b2af4bf09..92ecff6eb 100644
--- a/src/tools/clippy/tests/ui/eta.rs
+++ b/src/tools/clippy/tests/ui/eta.rs
@@ -331,7 +331,7 @@ impl dyn TestTrait + '_ {
}
// https://github.com/rust-lang/rust-clippy/issues/7746
-fn angle_brackets_and_substs() {
+fn angle_brackets_and_args() {
let array_opt: Option<&[u8; 3]> = Some(&[4, 8, 7]);
array_opt.map(|a| a.as_slice());
@@ -345,3 +345,58 @@ fn angle_brackets_and_substs() {
let dyn_opt: Option<&dyn TestTrait> = Some(&test_struct);
dyn_opt.map(|d| d.method_on_dyn());
}
+
+fn _late_bound_to_early_bound_regions() {
+ struct Foo<'a>(&'a u32);
+ impl<'a> Foo<'a> {
+ fn f(x: &'a u32) -> Self {
+ Foo(x)
+ }
+ }
+ fn f(f: impl for<'a> Fn(&'a u32) -> Foo<'a>) -> Foo<'static> {
+ f(&0)
+ }
+
+ let _ = f(|x| Foo::f(x));
+
+ struct Bar;
+ impl<'a> From<&'a u32> for Bar {
+ fn from(x: &'a u32) -> Bar {
+ Bar
+ }
+ }
+ fn f2(f: impl for<'a> Fn(&'a u32) -> Bar) -> Bar {
+ f(&0)
+ }
+
+ let _ = f2(|x| <Bar>::from(x));
+
+ struct Baz<'a>(&'a u32);
+ fn f3(f: impl Fn(&u32) -> Baz<'_>) -> Baz<'static> {
+ f(&0)
+ }
+
+ let _ = f3(|x| Baz(x));
+}
+
+fn _mixed_late_bound_and_early_bound_regions() {
+ fn f<T>(t: T, f: impl Fn(T, &u32) -> u32) -> u32 {
+ f(t, &0)
+ }
+ fn f2<'a, T: 'a>(_: &'a T, y: &u32) -> u32 {
+ *y
+ }
+ let _ = f(&0, |x, y| f2(x, y));
+}
+
+fn _closure_with_types() {
+ fn f<T>(x: T) -> T {
+ x
+ }
+ fn f2<T: Default>(f: impl Fn(T) -> T) -> T {
+ f(T::default())
+ }
+
+ let _ = f2(|x: u32| f(x));
+ let _ = f2(|x| -> u32 { f(x) });
+}
diff --git a/src/tools/clippy/tests/ui/eta.stderr b/src/tools/clippy/tests/ui/eta.stderr
index 0ac0b901d..ff40a2074 100644
--- a/src/tools/clippy/tests/ui/eta.stderr
+++ b/src/tools/clippy/tests/ui/eta.stderr
@@ -158,5 +158,11 @@ error: redundant closure
LL | dyn_opt.map(|d| d.method_on_dyn());
| ^^^^^^^^^^^^^^^^^^^^^ help: replace the closure with the method itself: `<dyn TestTrait>::method_on_dyn`
-error: aborting due to 26 previous errors
+error: redundant closure
+ --> $DIR/eta.rs:389:19
+ |
+LL | let _ = f(&0, |x, y| f2(x, y));
+ | ^^^^^^^^^^^^^^^ help: replace the closure with the function itself: `f2`
+
+error: aborting due to 27 previous errors
diff --git a/src/tools/clippy/tests/ui/expect.stderr b/src/tools/clippy/tests/ui/expect.stderr
index be340340d..f787fa973 100644
--- a/src/tools/clippy/tests/ui/expect.stderr
+++ b/src/tools/clippy/tests/ui/expect.stderr
@@ -4,7 +4,7 @@ error: used `expect()` on an `Option` value
LL | let _ = opt.expect("");
| ^^^^^^^^^^^^^^
|
- = help: if this value is `None`, it will panic
+ = note: if this value is `None`, it will panic
= note: `-D clippy::expect-used` implied by `-D warnings`
error: used `expect()` on a `Result` value
@@ -13,7 +13,7 @@ error: used `expect()` on a `Result` value
LL | let _ = res.expect("");
| ^^^^^^^^^^^^^^
|
- = help: if this value is an `Err`, it will panic
+ = note: if this value is an `Err`, it will panic
error: used `expect_err()` on a `Result` value
--> $DIR/expect.rs:12:13
@@ -21,7 +21,7 @@ error: used `expect_err()` on a `Result` value
LL | let _ = res.expect_err("");
| ^^^^^^^^^^^^^^^^^^
|
- = help: if this value is an `Ok`, it will panic
+ = note: if this value is an `Ok`, it will panic
error: aborting due to 3 previous errors
diff --git a/src/tools/clippy/tests/ui/expect_fun_call.stderr b/src/tools/clippy/tests/ui/expect_fun_call.stderr
index 36fb0e5de..a621f681d 100644
--- a/src/tools/clippy/tests/ui/expect_fun_call.stderr
+++ b/src/tools/clippy/tests/ui/expect_fun_call.stderr
@@ -2,7 +2,7 @@ error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:38:26
|
LL | with_none_and_format.expect(&format!("Error {}: fake error", error_code));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("Error {}: fake error", error_code))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("Error {}: fake error", error_code))`
|
= note: `-D clippy::expect-fun-call` implied by `-D warnings`
@@ -10,85 +10,85 @@ error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:41:26
|
LL | with_none_and_as_str.expect(format!("Error {}: fake error", error_code).as_str());
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("Error {}: fake error", error_code))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("Error {}: fake error", error_code))`
error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:44:37
|
LL | with_none_and_format_with_macro.expect(format!("Error {}: fake error", one!()).as_str());
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("Error {}: fake error", one!()))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("Error {}: fake error", one!()))`
error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:54:25
|
LL | with_err_and_format.expect(&format!("Error {}: fake error", error_code));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|_| panic!("Error {}: fake error", error_code))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|_| panic!("Error {}: fake error", error_code))`
error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:57:25
|
LL | with_err_and_as_str.expect(format!("Error {}: fake error", error_code).as_str());
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|_| panic!("Error {}: fake error", error_code))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|_| panic!("Error {}: fake error", error_code))`
error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:69:17
|
LL | Some("foo").expect(format!("{} {}", 1, 2).as_ref());
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("{} {}", 1, 2))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{} {}", 1, 2))`
error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:90:21
|
LL | Some("foo").expect(&get_string());
- | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_string()) })`
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_string()) })`
error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:91:21
|
LL | Some("foo").expect(get_string().as_ref());
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_string()) })`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_string()) })`
error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:92:21
|
LL | Some("foo").expect(get_string().as_str());
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_string()) })`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_string()) })`
error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:94:21
|
LL | Some("foo").expect(get_static_str());
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_static_str()) })`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_static_str()) })`
error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:95:21
|
LL | Some("foo").expect(get_non_static_str(&0));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_non_static_str(&0).to_string()) })`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_non_static_str(&0).to_string()) })`
error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:99:16
|
LL | Some(true).expect(&format!("key {}, {}", 1, 2));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("key {}, {}", 1, 2))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("key {}, {}", 1, 2))`
error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:105:17
|
LL | opt_ref.expect(&format!("{:?}", opt_ref));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("{:?}", opt_ref))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{:?}", opt_ref))`
error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:109:20
|
LL | format_capture.expect(&format!("{error_code}"));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("{error_code}"))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{error_code}"))`
error: use of `expect` followed by a function call
--> $DIR/expect_fun_call.rs:112:30
|
LL | format_capture_and_value.expect(&format!("{error_code}, {}", 1));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("{error_code}, {}", 1))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{error_code}, {}", 1))`
error: aborting due to 15 previous errors
diff --git a/src/tools/clippy/tests/ui/explicit_auto_deref.stderr b/src/tools/clippy/tests/ui/explicit_auto_deref.stderr
index 91863abcc..afc311e3f 100644
--- a/src/tools/clippy/tests/ui/explicit_auto_deref.stderr
+++ b/src/tools/clippy/tests/ui/explicit_auto_deref.stderr
@@ -2,7 +2,7 @@ error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:70:19
|
LL | let _: &str = &*s;
- | ^^^ help: try this: `&s`
+ | ^^^ help: try: `&s`
|
= note: `-D clippy::explicit-auto-deref` implied by `-D warnings`
@@ -10,229 +10,229 @@ error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:71:19
|
LL | let _: &str = &*{ String::new() };
- | ^^^^^^^^^^^^^^^^^^^ help: try this: `&{ String::new() }`
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `&{ String::new() }`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:72:19
|
LL | let _: &str = &mut *{ String::new() };
- | ^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&mut { String::new() }`
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut { String::new() }`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:76:11
|
LL | f_str(&*s);
- | ^^^ help: try this: `&s`
+ | ^^^ help: try: `&s`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:80:13
|
LL | f_str_t(&*s, &*s); // Don't lint second param.
- | ^^^ help: try this: `&s`
+ | ^^^ help: try: `&s`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:83:24
|
LL | let _: &Box<i32> = &**b;
- | ^^^^ help: try this: `&b`
+ | ^^^^ help: try: `&b`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:89:7
|
LL | c(&*s);
- | ^^^ help: try this: `&s`
+ | ^^^ help: try: `&s`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:95:9
|
LL | &**x
- | ^^^^ help: try this: `x`
+ | ^^^^ help: try: `x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:99:11
|
LL | { &**x }
- | ^^^^ help: try this: `x`
+ | ^^^^ help: try: `x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:103:9
|
LL | &**{ x }
- | ^^^^^^^^ help: try this: `{ x }`
+ | ^^^^^^^^ help: try: `{ x }`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:107:9
|
LL | &***x
- | ^^^^^ help: try this: `x`
+ | ^^^^^ help: try: `x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:124:12
|
LL | f1(&*x);
- | ^^^ help: try this: `&x`
+ | ^^^ help: try: `&x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:125:12
|
LL | f2(&*x);
- | ^^^ help: try this: `&x`
+ | ^^^ help: try: `&x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:126:12
|
LL | f3(&*x);
- | ^^^ help: try this: `&x`
+ | ^^^ help: try: `&x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:127:27
|
LL | f4.callable_str()(&*x);
- | ^^^ help: try this: `&x`
+ | ^^^ help: try: `&x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:128:12
|
LL | f5(&*x);
- | ^^^ help: try this: `&x`
+ | ^^^ help: try: `&x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:129:12
|
LL | f6(&*x);
- | ^^^ help: try this: `&x`
+ | ^^^ help: try: `&x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:130:27
|
LL | f7.callable_str()(&*x);
- | ^^^ help: try this: `&x`
+ | ^^^ help: try: `&x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:131:25
|
LL | f8.callable_t()(&*x);
- | ^^^ help: try this: `&x`
+ | ^^^ help: try: `&x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:132:12
|
LL | f9(&*x);
- | ^^^ help: try this: `&x`
+ | ^^^ help: try: `&x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:133:13
|
LL | f10(&*x);
- | ^^^ help: try this: `&x`
+ | ^^^ help: try: `&x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:134:26
|
LL | f11.callable_t()(&*x);
- | ^^^ help: try this: `&x`
+ | ^^^ help: try: `&x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:138:16
|
LL | let _ = S1(&*s);
- | ^^^ help: try this: `&s`
+ | ^^^ help: try: `&s`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:143:21
|
LL | let _ = S2 { s: &*s };
- | ^^^ help: try this: `&s`
+ | ^^^ help: try: `&s`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:159:30
|
LL | let _ = Self::S1(&**s);
- | ^^^^ help: try this: `s`
+ | ^^^^ help: try: `s`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:160:35
|
LL | let _ = Self::S2 { s: &**s };
- | ^^^^ help: try this: `s`
+ | ^^^^ help: try: `s`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:163:20
|
LL | let _ = E1::S1(&*s);
- | ^^^ help: try this: `&s`
+ | ^^^ help: try: `&s`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:164:25
|
LL | let _ = E1::S2 { s: &*s };
- | ^^^ help: try this: `&s`
+ | ^^^ help: try: `&s`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:182:13
|
LL | let _ = (*b).foo;
- | ^^^^ help: try this: `b`
+ | ^^^^ help: try: `b`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:183:13
|
LL | let _ = (**b).foo;
- | ^^^^^ help: try this: `b`
+ | ^^^^^ help: try: `b`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:198:19
|
LL | let _ = f_str(*ref_str);
- | ^^^^^^^^ help: try this: `ref_str`
+ | ^^^^^^^^ help: try: `ref_str`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:200:19
|
LL | let _ = f_str(**ref_ref_str);
- | ^^^^^^^^^^^^^ help: try this: `ref_ref_str`
+ | ^^^^^^^^^^^^^ help: try: `ref_ref_str`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:210:13
|
LL | f_str(&&*ref_str); // `needless_borrow` will suggest removing both references
- | ^^^^^^^^ help: try this: `ref_str`
+ | ^^^^^^^^ help: try: `ref_str`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:211:12
|
LL | f_str(&&**ref_str); // `needless_borrow` will suggest removing only one reference
- | ^^^^^^^^^^ help: try this: `ref_str`
+ | ^^^^^^^^^^ help: try: `ref_str`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:220:41
|
LL | let _ = || -> &'static str { return *s };
- | ^^ help: try this: `s`
+ | ^^ help: try: `s`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:239:9
|
LL | &**x
- | ^^^^ help: try this: `x`
+ | ^^^^ help: try: `x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:262:8
|
LL | c1(*x);
- | ^^ help: try this: `x`
+ | ^^ help: try: `x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:265:20
|
LL | return *x;
- | ^^ help: try this: `x`
+ | ^^ help: try: `x`
error: deref which would be done by auto-deref
--> $DIR/explicit_auto_deref.rs:267:9
|
LL | *x
- | ^^ help: try this: `x`
+ | ^^ help: try: `x`
error: aborting due to 39 previous errors
diff --git a/src/tools/clippy/tests/ui/explicit_deref_methods.fixed b/src/tools/clippy/tests/ui/explicit_deref_methods.fixed
index 4d72b58cd..4c0b0d8f2 100644
--- a/src/tools/clippy/tests/ui/explicit_deref_methods.fixed
+++ b/src/tools/clippy/tests/ui/explicit_deref_methods.fixed
@@ -4,6 +4,7 @@
#![allow(
clippy::borrow_deref_ref,
suspicious_double_ref_op,
+ noop_method_call,
clippy::explicit_auto_deref,
clippy::needless_borrow,
clippy::no_effect,
diff --git a/src/tools/clippy/tests/ui/explicit_deref_methods.rs b/src/tools/clippy/tests/ui/explicit_deref_methods.rs
index fcd945de3..bc5da35e5 100644
--- a/src/tools/clippy/tests/ui/explicit_deref_methods.rs
+++ b/src/tools/clippy/tests/ui/explicit_deref_methods.rs
@@ -4,6 +4,7 @@
#![allow(
clippy::borrow_deref_ref,
suspicious_double_ref_op,
+ noop_method_call,
clippy::explicit_auto_deref,
clippy::needless_borrow,
clippy::no_effect,
diff --git a/src/tools/clippy/tests/ui/explicit_deref_methods.stderr b/src/tools/clippy/tests/ui/explicit_deref_methods.stderr
index d025035b7..e4d2fe3a1 100644
--- a/src/tools/clippy/tests/ui/explicit_deref_methods.stderr
+++ b/src/tools/clippy/tests/ui/explicit_deref_methods.stderr
@@ -1,76 +1,76 @@
error: explicit `deref` method call
- --> $DIR/explicit_deref_methods.rs:54:19
+ --> $DIR/explicit_deref_methods.rs:55:19
|
LL | let b: &str = a.deref();
- | ^^^^^^^^^ help: try this: `&*a`
+ | ^^^^^^^^^ help: try: `&*a`
|
= note: `-D clippy::explicit-deref-methods` implied by `-D warnings`
error: explicit `deref_mut` method call
- --> $DIR/explicit_deref_methods.rs:56:23
+ --> $DIR/explicit_deref_methods.rs:57:23
|
LL | let b: &mut str = a.deref_mut();
- | ^^^^^^^^^^^^^ help: try this: `&mut **a`
+ | ^^^^^^^^^^^^^ help: try: `&mut **a`
error: explicit `deref` method call
- --> $DIR/explicit_deref_methods.rs:59:39
+ --> $DIR/explicit_deref_methods.rs:60:39
|
LL | let b: String = format!("{}, {}", a.deref(), a.deref());
- | ^^^^^^^^^ help: try this: `&*a`
+ | ^^^^^^^^^ help: try: `&*a`
error: explicit `deref` method call
- --> $DIR/explicit_deref_methods.rs:59:50
+ --> $DIR/explicit_deref_methods.rs:60:50
|
LL | let b: String = format!("{}, {}", a.deref(), a.deref());
- | ^^^^^^^^^ help: try this: `&*a`
+ | ^^^^^^^^^ help: try: `&*a`
error: explicit `deref` method call
- --> $DIR/explicit_deref_methods.rs:61:20
+ --> $DIR/explicit_deref_methods.rs:62:20
|
LL | println!("{}", a.deref());
- | ^^^^^^^^^ help: try this: `&*a`
+ | ^^^^^^^^^ help: try: `&*a`
error: explicit `deref` method call
- --> $DIR/explicit_deref_methods.rs:64:11
+ --> $DIR/explicit_deref_methods.rs:65:11
|
LL | match a.deref() {
- | ^^^^^^^^^ help: try this: `&*a`
+ | ^^^^^^^^^ help: try: `&*a`
error: explicit `deref` method call
- --> $DIR/explicit_deref_methods.rs:68:28
+ --> $DIR/explicit_deref_methods.rs:69:28
|
LL | let b: String = concat(a.deref());
- | ^^^^^^^^^ help: try this: `&*a`
+ | ^^^^^^^^^ help: try: `&*a`
error: explicit `deref` method call
- --> $DIR/explicit_deref_methods.rs:70:13
+ --> $DIR/explicit_deref_methods.rs:71:13
|
LL | let b = just_return(a).deref();
- | ^^^^^^^^^^^^^^^^^^^^^^ help: try this: `just_return(a)`
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `just_return(a)`
error: explicit `deref` method call
- --> $DIR/explicit_deref_methods.rs:72:28
+ --> $DIR/explicit_deref_methods.rs:73:28
|
LL | let b: String = concat(just_return(a).deref());
- | ^^^^^^^^^^^^^^^^^^^^^^ help: try this: `just_return(a)`
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `just_return(a)`
error: explicit `deref` method call
- --> $DIR/explicit_deref_methods.rs:74:19
+ --> $DIR/explicit_deref_methods.rs:75:19
|
LL | let b: &str = a.deref().deref();
- | ^^^^^^^^^^^^^^^^^ help: try this: `&**a`
+ | ^^^^^^^^^^^^^^^^^ help: try: `&**a`
error: explicit `deref` method call
- --> $DIR/explicit_deref_methods.rs:77:13
+ --> $DIR/explicit_deref_methods.rs:78:13
|
LL | let b = opt_a.unwrap().deref();
- | ^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&*opt_a.unwrap()`
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*opt_a.unwrap()`
error: explicit `deref` method call
- --> $DIR/explicit_deref_methods.rs:114:31
+ --> $DIR/explicit_deref_methods.rs:115:31
|
LL | let b: &str = expr_deref!(a.deref());
- | ^^^^^^^^^ help: try this: `&*a`
+ | ^^^^^^^^^ help: try: `&*a`
error: aborting due to 12 previous errors
diff --git a/src/tools/clippy/tests/ui/explicit_write.stderr b/src/tools/clippy/tests/ui/explicit_write.stderr
index 457e9c627..b3aa7274c 100644
--- a/src/tools/clippy/tests/ui/explicit_write.stderr
+++ b/src/tools/clippy/tests/ui/explicit_write.stderr
@@ -2,7 +2,7 @@ error: use of `write!(stdout(), ...).unwrap()`
--> $DIR/explicit_write.rs:24:9
|
LL | write!(std::io::stdout(), "test").unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `print!("test")`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `print!("test")`
|
= note: `-D clippy::explicit-write` implied by `-D warnings`
@@ -10,73 +10,73 @@ error: use of `write!(stderr(), ...).unwrap()`
--> $DIR/explicit_write.rs:25:9
|
LL | write!(std::io::stderr(), "test").unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprint!("test")`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprint!("test")`
error: use of `writeln!(stdout(), ...).unwrap()`
--> $DIR/explicit_write.rs:26:9
|
LL | writeln!(std::io::stdout(), "test").unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `println!("test")`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `println!("test")`
error: use of `writeln!(stderr(), ...).unwrap()`
--> $DIR/explicit_write.rs:27:9
|
LL | writeln!(std::io::stderr(), "test").unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("test")`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("test")`
error: use of `stdout().write_fmt(...).unwrap()`
--> $DIR/explicit_write.rs:28:9
|
LL | std::io::stdout().write_fmt(format_args!("test")).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `print!("test")`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `print!("test")`
error: use of `stderr().write_fmt(...).unwrap()`
--> $DIR/explicit_write.rs:29:9
|
LL | std::io::stderr().write_fmt(format_args!("test")).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprint!("test")`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprint!("test")`
error: use of `writeln!(stdout(), ...).unwrap()`
--> $DIR/explicit_write.rs:32:9
|
LL | writeln!(std::io::stdout(), "test/ntest").unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `println!("test/ntest")`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `println!("test/ntest")`
error: use of `writeln!(stderr(), ...).unwrap()`
--> $DIR/explicit_write.rs:33:9
|
LL | writeln!(std::io::stderr(), "test/ntest").unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("test/ntest")`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("test/ntest")`
error: use of `writeln!(stderr(), ...).unwrap()`
--> $DIR/explicit_write.rs:36:9
|
LL | writeln!(std::io::stderr(), "with {}", value).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("with {}", value)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("with {}", value)`
error: use of `writeln!(stderr(), ...).unwrap()`
--> $DIR/explicit_write.rs:37:9
|
LL | writeln!(std::io::stderr(), "with {} {}", 2, value).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("with {} {}", 2, value)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("with {} {}", 2, value)`
error: use of `writeln!(stderr(), ...).unwrap()`
--> $DIR/explicit_write.rs:38:9
|
LL | writeln!(std::io::stderr(), "with {value}").unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("with {value}")`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("with {value}")`
error: use of `writeln!(stderr(), ...).unwrap()`
--> $DIR/explicit_write.rs:39:9
|
LL | writeln!(std::io::stderr(), "macro arg {}", one!()).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("macro arg {}", one!())`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("macro arg {}", one!())`
error: use of `writeln!(stderr(), ...).unwrap()`
--> $DIR/explicit_write.rs:41:9
|
LL | writeln!(std::io::stderr(), "{:w$}", value, w = width).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("{:w$}", value, w = width)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("{:w$}", value, w = width)`
error: aborting due to 13 previous errors
diff --git a/src/tools/clippy/tests/ui/extend_with_drain.stderr b/src/tools/clippy/tests/ui/extend_with_drain.stderr
index da14ddb25..eb2dd304d 100644
--- a/src/tools/clippy/tests/ui/extend_with_drain.stderr
+++ b/src/tools/clippy/tests/ui/extend_with_drain.stderr
@@ -2,7 +2,7 @@ error: use of `extend` instead of `append` for adding the full range of a second
--> $DIR/extend_with_drain.rs:9:5
|
LL | vec2.extend(vec1.drain(..));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `vec2.append(&mut vec1)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec2.append(&mut vec1)`
|
= note: `-D clippy::extend-with-drain` implied by `-D warnings`
@@ -10,19 +10,19 @@ error: use of `extend` instead of `append` for adding the full range of a second
--> $DIR/extend_with_drain.rs:14:5
|
LL | vec4.extend(vec3.drain(..));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `vec4.append(&mut vec3)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec4.append(&mut vec3)`
error: use of `extend` instead of `append` for adding the full range of a second vector
--> $DIR/extend_with_drain.rs:18:5
|
LL | vec11.extend(return_vector().drain(..));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `vec11.append(&mut return_vector())`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec11.append(&mut return_vector())`
error: use of `extend` instead of `append` for adding the full range of a second vector
--> $DIR/extend_with_drain.rs:49:5
|
LL | y.extend(ref_x.drain(..));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `y.append(ref_x)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `y.append(ref_x)`
error: aborting due to 4 previous errors
diff --git a/src/tools/clippy/tests/ui/filter_map_bool_then.fixed b/src/tools/clippy/tests/ui/filter_map_bool_then.fixed
new file mode 100644
index 000000000..e5c9f783f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/filter_map_bool_then.fixed
@@ -0,0 +1,58 @@
+//@run-rustfix
+//@aux-build:proc_macros.rs:proc-macro
+#![allow(
+ clippy::clone_on_copy,
+ clippy::map_identity,
+ clippy::unnecessary_lazy_evaluations,
+ clippy::unnecessary_filter_map,
+ unused
+)]
+#![warn(clippy::filter_map_bool_then)]
+
+#[macro_use]
+extern crate proc_macros;
+
+#[derive(Clone, PartialEq)]
+struct NonCopy;
+
+fn main() {
+ let v = vec![1, 2, 3, 4, 5, 6];
+ v.clone().iter().filter(|&i| (i % 2 == 0)).map(|i| i + 1);
+ v.clone().into_iter().filter(|&i| (i % 2 == 0)).map(|i| i + 1);
+ v.clone()
+ .into_iter()
+ .filter(|&i| (i % 2 == 0)).map(|i| i + 1);
+ v.clone()
+ .into_iter()
+ .filter(|&i| i != 1000)
+ .filter(|&i| (i % 2 == 0)).map(|i| i + 1);
+ v.iter()
+ .copied()
+ .filter(|&i| i != 1000)
+ .filter(|&i| (i.clone() % 2 == 0)).map(|i| i + 1);
+    // Although this is non-copy, `is_copy` still returns true (at least for now) because it's `&NonCopy`,
+    // and any `&` is `Copy`. So, since we can dereference it in `filter` (it's then `&&NonCopy`),
+ // we can lint this and still get the same input type.
+ // See: <https://doc.rust-lang.org/std/primitive.reference.html#trait-implementations-1>
+ let v = vec![NonCopy, NonCopy];
+ v.clone().iter().filter(|&i| (i == &NonCopy)).map(|i| i);
+ // Do not lint
+ let v = vec![NonCopy, NonCopy];
+ v.clone().into_iter().filter_map(|i| (i == NonCopy).then(|| i));
+ // `&mut` is `!Copy`.
+ let v = vec![NonCopy, NonCopy];
+ v.clone().iter_mut().filter_map(|i| (i == &mut NonCopy).then(|| i));
+ external! {
+ let v = vec![1, 2, 3, 4, 5, 6];
+ v.clone().into_iter().filter_map(|i| (i % 2 == 0).then(|| i + 1));
+ }
+ with_span! {
+ span
+ let v = vec![1, 2, 3, 4, 5, 6];
+ v.clone().into_iter().filter_map(|i| (i % 2 == 0).then(|| i + 1));
+ }
+}
+
+fn issue11309<'a>(iter: impl Iterator<Item = (&'a str, &'a str)>) -> Vec<&'a str> {
+ iter.filter_map(|(_, s): (&str, _)| Some(s)).collect()
+}
diff --git a/src/tools/clippy/tests/ui/filter_map_bool_then.rs b/src/tools/clippy/tests/ui/filter_map_bool_then.rs
new file mode 100644
index 000000000..7c9b99df7
--- /dev/null
+++ b/src/tools/clippy/tests/ui/filter_map_bool_then.rs
@@ -0,0 +1,58 @@
+//@run-rustfix
+//@aux-build:proc_macros.rs:proc-macro
+#![allow(
+ clippy::clone_on_copy,
+ clippy::map_identity,
+ clippy::unnecessary_lazy_evaluations,
+ clippy::unnecessary_filter_map,
+ unused
+)]
+#![warn(clippy::filter_map_bool_then)]
+
+#[macro_use]
+extern crate proc_macros;
+
+#[derive(Clone, PartialEq)]
+struct NonCopy;
+
+fn main() {
+ let v = vec![1, 2, 3, 4, 5, 6];
+ v.clone().iter().filter_map(|i| (i % 2 == 0).then(|| i + 1));
+ v.clone().into_iter().filter_map(|i| (i % 2 == 0).then(|| i + 1));
+ v.clone()
+ .into_iter()
+ .filter_map(|i| -> Option<_> { (i % 2 == 0).then(|| i + 1) });
+ v.clone()
+ .into_iter()
+ .filter(|&i| i != 1000)
+ .filter_map(|i| (i % 2 == 0).then(|| i + 1));
+ v.iter()
+ .copied()
+ .filter(|&i| i != 1000)
+ .filter_map(|i| (i.clone() % 2 == 0).then(|| i + 1));
+    // Although this is non-copy, `is_copy` still returns true (at least for now) because it's `&NonCopy`,
+    // and any `&` is `Copy`. So, since we can dereference it in `filter` (it's then `&&NonCopy`),
+ // we can lint this and still get the same input type.
+ // See: <https://doc.rust-lang.org/std/primitive.reference.html#trait-implementations-1>
+ let v = vec![NonCopy, NonCopy];
+ v.clone().iter().filter_map(|i| (i == &NonCopy).then(|| i));
+ // Do not lint
+ let v = vec![NonCopy, NonCopy];
+ v.clone().into_iter().filter_map(|i| (i == NonCopy).then(|| i));
+ // `&mut` is `!Copy`.
+ let v = vec![NonCopy, NonCopy];
+ v.clone().iter_mut().filter_map(|i| (i == &mut NonCopy).then(|| i));
+ external! {
+ let v = vec![1, 2, 3, 4, 5, 6];
+ v.clone().into_iter().filter_map(|i| (i % 2 == 0).then(|| i + 1));
+ }
+ with_span! {
+ span
+ let v = vec![1, 2, 3, 4, 5, 6];
+ v.clone().into_iter().filter_map(|i| (i % 2 == 0).then(|| i + 1));
+ }
+}
+
+fn issue11309<'a>(iter: impl Iterator<Item = (&'a str, &'a str)>) -> Vec<&'a str> {
+ iter.filter_map(|(_, s): (&str, _)| Some(s)).collect()
+}
diff --git a/src/tools/clippy/tests/ui/filter_map_bool_then.stderr b/src/tools/clippy/tests/ui/filter_map_bool_then.stderr
new file mode 100644
index 000000000..fffa5252e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/filter_map_bool_then.stderr
@@ -0,0 +1,40 @@
+error: usage of `bool::then` in `filter_map`
+ --> $DIR/filter_map_bool_then.rs:20:22
+ |
+LL | v.clone().iter().filter_map(|i| (i % 2 == 0).then(|| i + 1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `filter` then `map` instead: `filter(|&i| (i % 2 == 0)).map(|i| i + 1)`
+ |
+ = note: `-D clippy::filter-map-bool-then` implied by `-D warnings`
+
+error: usage of `bool::then` in `filter_map`
+ --> $DIR/filter_map_bool_then.rs:21:27
+ |
+LL | v.clone().into_iter().filter_map(|i| (i % 2 == 0).then(|| i + 1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `filter` then `map` instead: `filter(|&i| (i % 2 == 0)).map(|i| i + 1)`
+
+error: usage of `bool::then` in `filter_map`
+ --> $DIR/filter_map_bool_then.rs:24:10
+ |
+LL | .filter_map(|i| -> Option<_> { (i % 2 == 0).then(|| i + 1) });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `filter` then `map` instead: `filter(|&i| (i % 2 == 0)).map(|i| i + 1)`
+
+error: usage of `bool::then` in `filter_map`
+ --> $DIR/filter_map_bool_then.rs:28:10
+ |
+LL | .filter_map(|i| (i % 2 == 0).then(|| i + 1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `filter` then `map` instead: `filter(|&i| (i % 2 == 0)).map(|i| i + 1)`
+
+error: usage of `bool::then` in `filter_map`
+ --> $DIR/filter_map_bool_then.rs:32:10
+ |
+LL | .filter_map(|i| (i.clone() % 2 == 0).then(|| i + 1));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `filter` then `map` instead: `filter(|&i| (i.clone() % 2 == 0)).map(|i| i + 1)`
+
+error: usage of `bool::then` in `filter_map`
+ --> $DIR/filter_map_bool_then.rs:38:22
+ |
+LL | v.clone().iter().filter_map(|i| (i == &NonCopy).then(|| i));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `filter` then `map` instead: `filter(|&i| (i == &NonCopy)).map(|i| i)`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr b/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr
index a9fc6abe8..26d9c5e19 100644
--- a/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr
+++ b/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr
@@ -2,7 +2,7 @@ error: called `filter_map(..).next()` on an `Iterator`. This is more succinctly
--> $DIR/filter_map_next_fixable.rs:9:32
|
LL | let element: Option<i32> = a.iter().filter_map(|s| s.parse().ok()).next();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `a.iter().find_map(|s| s.parse().ok())`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `a.iter().find_map(|s| s.parse().ok())`
|
= note: `-D clippy::filter-map-next` implied by `-D warnings`
@@ -10,7 +10,7 @@ error: called `filter_map(..).next()` on an `Iterator`. This is more succinctly
--> $DIR/filter_map_next_fixable.rs:22:26
|
LL | let _: Option<i32> = a.iter().filter_map(|s| s.parse().ok()).next();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `a.iter().find_map(|s| s.parse().ok())`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `a.iter().find_map(|s| s.parse().ok())`
error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui/fn_null_check.rs b/src/tools/clippy/tests/ui/fn_null_check.rs
deleted file mode 100644
index dfdea100c..000000000
--- a/src/tools/clippy/tests/ui/fn_null_check.rs
+++ /dev/null
@@ -1,22 +0,0 @@
-#![allow(unused)]
-#![warn(clippy::fn_null_check)]
-#![allow(clippy::cmp_null)]
-#![allow(clippy::needless_if)]
-#![allow(clippy::ptr_eq)]
-#![allow(clippy::zero_ptr)]
-
-pub const ZPTR: *const () = 0 as *const _;
-pub const NOT_ZPTR: *const () = 1 as *const _;
-
-fn main() {
- let fn_ptr = main;
-
- if (fn_ptr as *mut ()).is_null() {}
- if (fn_ptr as *const u8).is_null() {}
- if (fn_ptr as *const ()) == std::ptr::null() {}
- if (fn_ptr as *const ()) == (0 as *const ()) {}
- if (fn_ptr as *const ()) == ZPTR {}
-
- // no lint
- if (fn_ptr as *const ()) == NOT_ZPTR {}
-}
diff --git a/src/tools/clippy/tests/ui/fn_null_check.stderr b/src/tools/clippy/tests/ui/fn_null_check.stderr
deleted file mode 100644
index 5b9f48a96..000000000
--- a/src/tools/clippy/tests/ui/fn_null_check.stderr
+++ /dev/null
@@ -1,43 +0,0 @@
-error: function pointer assumed to be nullable, even though it isn't
- --> $DIR/fn_null_check.rs:14:8
- |
-LL | if (fn_ptr as *mut ()).is_null() {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- |
- = help: try wrapping your function pointer type in `Option<T>` instead, and using `is_none` to check for null pointer value
- = note: `-D clippy::fn-null-check` implied by `-D warnings`
-
-error: function pointer assumed to be nullable, even though it isn't
- --> $DIR/fn_null_check.rs:15:8
- |
-LL | if (fn_ptr as *const u8).is_null() {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- |
- = help: try wrapping your function pointer type in `Option<T>` instead, and using `is_none` to check for null pointer value
-
-error: function pointer assumed to be nullable, even though it isn't
- --> $DIR/fn_null_check.rs:16:8
- |
-LL | if (fn_ptr as *const ()) == std::ptr::null() {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- |
- = help: try wrapping your function pointer type in `Option<T>` instead, and using `is_none` to check for null pointer value
-
-error: function pointer assumed to be nullable, even though it isn't
- --> $DIR/fn_null_check.rs:17:8
- |
-LL | if (fn_ptr as *const ()) == (0 as *const ()) {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- |
- = help: try wrapping your function pointer type in `Option<T>` instead, and using `is_none` to check for null pointer value
-
-error: function pointer assumed to be nullable, even though it isn't
- --> $DIR/fn_null_check.rs:18:8
- |
-LL | if (fn_ptr as *const ()) == ZPTR {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- |
- = help: try wrapping your function pointer type in `Option<T>` instead, and using `is_none` to check for null pointer value
-
-error: aborting due to 5 previous errors
-
diff --git a/src/tools/clippy/tests/ui/format_collect.rs b/src/tools/clippy/tests/ui/format_collect.rs
new file mode 100644
index 000000000..c7f2b7b69
--- /dev/null
+++ b/src/tools/clippy/tests/ui/format_collect.rs
@@ -0,0 +1,31 @@
+#![allow(unused, dead_code)]
+#![warn(clippy::format_collect)]
+
+fn hex_encode(bytes: &[u8]) -> String {
+ bytes.iter().map(|b| format!("{b:02X}")).collect()
+}
+
+#[rustfmt::skip]
+fn hex_encode_deep(bytes: &[u8]) -> String {
+ bytes.iter().map(|b| {{{{{ format!("{b:02X}") }}}}}).collect()
+}
+
+macro_rules! fmt {
+ ($x:ident) => {
+ format!("{x:02X}", x = $x)
+ };
+}
+
+fn from_macro(bytes: &[u8]) -> String {
+ bytes.iter().map(|x| fmt!(x)).collect()
+}
+
+fn with_block() -> String {
+ (1..10)
+ .map(|s| {
+ let y = 1;
+ format!("{s} {y}")
+ })
+ .collect()
+}
+fn main() {}
diff --git a/src/tools/clippy/tests/ui/format_collect.stderr b/src/tools/clippy/tests/ui/format_collect.stderr
new file mode 100644
index 000000000..d918f1ed4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/format_collect.stderr
@@ -0,0 +1,62 @@
+error: use of `format!` to build up a string from an iterator
+ --> $DIR/format_collect.rs:5:5
+ |
+LL | bytes.iter().map(|b| format!("{b:02X}")).collect()
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: call `fold` instead
+ --> $DIR/format_collect.rs:5:18
+ |
+LL | bytes.iter().map(|b| format!("{b:02X}")).collect()
+ | ^^^
+help: ... and use the `write!` macro here
+ --> $DIR/format_collect.rs:5:26
+ |
+LL | bytes.iter().map(|b| format!("{b:02X}")).collect()
+ | ^^^^^^^^^^^^^^^^^^
+ = note: this can be written more efficiently by appending to a `String` directly
+ = note: `-D clippy::format-collect` implied by `-D warnings`
+
+error: use of `format!` to build up a string from an iterator
+ --> $DIR/format_collect.rs:10:5
+ |
+LL | bytes.iter().map(|b| {{{{{ format!("{b:02X}") }}}}}).collect()
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: call `fold` instead
+ --> $DIR/format_collect.rs:10:18
+ |
+LL | bytes.iter().map(|b| {{{{{ format!("{b:02X}") }}}}}).collect()
+ | ^^^
+help: ... and use the `write!` macro here
+ --> $DIR/format_collect.rs:10:32
+ |
+LL | bytes.iter().map(|b| {{{{{ format!("{b:02X}") }}}}}).collect()
+ | ^^^^^^^^^^^^^^^^^^
+ = note: this can be written more efficiently by appending to a `String` directly
+
+error: use of `format!` to build up a string from an iterator
+ --> $DIR/format_collect.rs:24:5
+ |
+LL | / (1..10)
+LL | | .map(|s| {
+LL | | let y = 1;
+LL | | format!("{s} {y}")
+LL | | })
+LL | | .collect()
+ | |__________________^
+ |
+help: call `fold` instead
+ --> $DIR/format_collect.rs:25:10
+ |
+LL | .map(|s| {
+ | ^^^
+help: ... and use the `write!` macro here
+ --> $DIR/format_collect.rs:27:13
+ |
+LL | format!("{s} {y}")
+ | ^^^^^^^^^^^^^^^^^^
+ = note: this can be written more efficiently by appending to a `String` directly
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/four_forward_slashes.fixed b/src/tools/clippy/tests/ui/four_forward_slashes.fixed
new file mode 100644
index 000000000..54b2c414b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/four_forward_slashes.fixed
@@ -0,0 +1,48 @@
+//@run-rustfix
+//@aux-build:proc_macros.rs:proc-macro
+#![feature(custom_inner_attributes)]
+#![allow(unused)]
+#![warn(clippy::four_forward_slashes)]
+#![no_main]
+#![rustfmt::skip]
+
+#[macro_use]
+extern crate proc_macros;
+
+/// whoops
+fn a() {}
+
+/// whoops
+#[allow(dead_code)]
+fn b() {}
+
+/// whoops
+/// two borked comments!
+#[track_caller]
+fn c() {}
+
+fn d() {}
+
+#[test]
+/// between attributes
+#[allow(dead_code)]
+fn g() {}
+
+/// not very start of contents
+fn h() {}
+
+fn i() {
+ //// don't lint me bozo
+ todo!()
+}
+
+external! {
+ //// don't lint me bozo
+ fn e() {}
+}
+
+with_span! {
+ span
+ //// don't lint me bozo
+ fn f() {}
+}
diff --git a/src/tools/clippy/tests/ui/four_forward_slashes.rs b/src/tools/clippy/tests/ui/four_forward_slashes.rs
new file mode 100644
index 000000000..facdc8cb1
--- /dev/null
+++ b/src/tools/clippy/tests/ui/four_forward_slashes.rs
@@ -0,0 +1,48 @@
+//@run-rustfix
+//@aux-build:proc_macros.rs:proc-macro
+#![feature(custom_inner_attributes)]
+#![allow(unused)]
+#![warn(clippy::four_forward_slashes)]
+#![no_main]
+#![rustfmt::skip]
+
+#[macro_use]
+extern crate proc_macros;
+
+//// whoops
+fn a() {}
+
+//// whoops
+#[allow(dead_code)]
+fn b() {}
+
+//// whoops
+//// two borked comments!
+#[track_caller]
+fn c() {}
+
+fn d() {}
+
+#[test]
+//// between attributes
+#[allow(dead_code)]
+fn g() {}
+
+ //// not very start of contents
+fn h() {}
+
+fn i() {
+ //// don't lint me bozo
+ todo!()
+}
+
+external! {
+ //// don't lint me bozo
+ fn e() {}
+}
+
+with_span! {
+ span
+ //// don't lint me bozo
+ fn f() {}
+}
diff --git a/src/tools/clippy/tests/ui/four_forward_slashes.stderr b/src/tools/clippy/tests/ui/four_forward_slashes.stderr
new file mode 100644
index 000000000..89162e6b0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/four_forward_slashes.stderr
@@ -0,0 +1,68 @@
+error: this item has comments with 4 forward slashes (`////`). These look like doc comments, but they aren't
+ --> $DIR/four_forward_slashes.rs:12:1
+ |
+LL | / //// whoops
+LL | | fn a() {}
+ | |_
+ |
+ = note: `-D clippy::four-forward-slashes` implied by `-D warnings`
+help: make this a doc comment by removing one `/`
+ |
+LL + /// whoops
+ |
+
+error: this item has comments with 4 forward slashes (`////`). These look like doc comments, but they aren't
+ --> $DIR/four_forward_slashes.rs:15:1
+ |
+LL | / //// whoops
+LL | | #[allow(dead_code)]
+LL | | fn b() {}
+ | |_
+ |
+help: make this a doc comment by removing one `/`
+ |
+LL + /// whoops
+ |
+
+error: this item has comments with 4 forward slashes (`////`). These look like doc comments, but they aren't
+ --> $DIR/four_forward_slashes.rs:19:1
+ |
+LL | / //// whoops
+LL | | //// two borked comments!
+LL | | #[track_caller]
+LL | | fn c() {}
+ | |_
+ |
+help: turn these into doc comments by removing one `/`
+ |
+LL + /// whoops
+LL ~ /// two borked comments!
+ |
+
+error: this item has comments with 4 forward slashes (`////`). These look like doc comments, but they aren't
+ --> $DIR/four_forward_slashes.rs:27:1
+ |
+LL | / //// between attributes
+LL | | #[allow(dead_code)]
+LL | | fn g() {}
+ | |_
+ |
+help: make this a doc comment by removing one `/`
+ |
+LL + /// between attributes
+ |
+
+error: this item has comments with 4 forward slashes (`////`). These look like doc comments, but they aren't
+ --> $DIR/four_forward_slashes.rs:31:1
+ |
+LL | / //// not very start of contents
+LL | | fn h() {}
+ | |_
+ |
+help: make this a doc comment by removing one `/`
+ |
+LL + /// not very start of contents
+ |
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/four_forward_slashes_first_line.fixed b/src/tools/clippy/tests/ui/four_forward_slashes_first_line.fixed
new file mode 100644
index 000000000..ce272b4c6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/four_forward_slashes_first_line.fixed
@@ -0,0 +1,7 @@
+/// borked doc comment on the first line. doesn't combust!
+fn a() {}
+
+//@run-rustfix
+// This test's entire purpose is to make sure we don't panic if the comment with four slashes
+// extends to the first line of the file. This is likely pretty rare in production, but an ICE is an
+// ICE.
diff --git a/src/tools/clippy/tests/ui/four_forward_slashes_first_line.rs b/src/tools/clippy/tests/ui/four_forward_slashes_first_line.rs
new file mode 100644
index 000000000..d8f82d441
--- /dev/null
+++ b/src/tools/clippy/tests/ui/four_forward_slashes_first_line.rs
@@ -0,0 +1,7 @@
+//// borked doc comment on the first line. doesn't combust!
+fn a() {}
+
+//@run-rustfix
+// This test's entire purpose is to make sure we don't panic if the comment with four slashes
+// extends to the first line of the file. This is likely pretty rare in production, but an ICE is an
+// ICE.
diff --git a/src/tools/clippy/tests/ui/four_forward_slashes_first_line.stderr b/src/tools/clippy/tests/ui/four_forward_slashes_first_line.stderr
new file mode 100644
index 000000000..7944da14f
--- /dev/null
+++ b/src/tools/clippy/tests/ui/four_forward_slashes_first_line.stderr
@@ -0,0 +1,15 @@
+error: this item has comments with 4 forward slashes (`////`). These look like doc comments, but they aren't
+ --> $DIR/four_forward_slashes_first_line.rs:1:1
+ |
+LL | / //// borked doc comment on the first line. doesn't combust!
+LL | | fn a() {}
+ | |_
+ |
+ = note: `-D clippy::four-forward-slashes` implied by `-D warnings`
+help: make this a doc comment by removing one `/`
+ |
+LL + /// borked doc comment on the first line. doesn't combust!
+ |
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/get_first.fixed b/src/tools/clippy/tests/ui/get_first.fixed
index a29c0918a..bc2f86566 100644
--- a/src/tools/clippy/tests/ui/get_first.fixed
+++ b/src/tools/clippy/tests/ui/get_first.fixed
@@ -1,9 +1,7 @@
//@run-rustfix
#![warn(clippy::get_first)]
#![allow(clippy::useless_vec)]
-use std::collections::BTreeMap;
-use std::collections::HashMap;
-use std::collections::VecDeque;
+use std::collections::{BTreeMap, HashMap, VecDeque};
struct Bar {
arr: [u32; 3],
diff --git a/src/tools/clippy/tests/ui/get_first.rs b/src/tools/clippy/tests/ui/get_first.rs
index 2062f3ec2..bc0e233fd 100644
--- a/src/tools/clippy/tests/ui/get_first.rs
+++ b/src/tools/clippy/tests/ui/get_first.rs
@@ -1,9 +1,7 @@
//@run-rustfix
#![warn(clippy::get_first)]
#![allow(clippy::useless_vec)]
-use std::collections::BTreeMap;
-use std::collections::HashMap;
-use std::collections::VecDeque;
+use std::collections::{BTreeMap, HashMap, VecDeque};
struct Bar {
arr: [u32; 3],
diff --git a/src/tools/clippy/tests/ui/get_first.stderr b/src/tools/clippy/tests/ui/get_first.stderr
index 4e267ba9a..0899a5905 100644
--- a/src/tools/clippy/tests/ui/get_first.stderr
+++ b/src/tools/clippy/tests/ui/get_first.stderr
@@ -1,5 +1,5 @@
error: accessing first element with `x.get(0)`
- --> $DIR/get_first.rs:20:13
+ --> $DIR/get_first.rs:18:13
|
LL | let _ = x.get(0); // Use x.first()
| ^^^^^^^^ help: try: `x.first()`
@@ -7,13 +7,13 @@ LL | let _ = x.get(0); // Use x.first()
= note: `-D clippy::get-first` implied by `-D warnings`
error: accessing first element with `y.get(0)`
- --> $DIR/get_first.rs:25:13
+ --> $DIR/get_first.rs:23:13
|
LL | let _ = y.get(0); // Use y.first()
| ^^^^^^^^ help: try: `y.first()`
error: accessing first element with `z.get(0)`
- --> $DIR/get_first.rs:30:13
+ --> $DIR/get_first.rs:28:13
|
LL | let _ = z.get(0); // Use z.first()
| ^^^^^^^^ help: try: `z.first()`
diff --git a/src/tools/clippy/tests/ui/get_unwrap.fixed b/src/tools/clippy/tests/ui/get_unwrap.fixed
index 56ee37f02..fda334407 100644
--- a/src/tools/clippy/tests/ui/get_unwrap.fixed
+++ b/src/tools/clippy/tests/ui/get_unwrap.fixed
@@ -9,9 +9,7 @@
#![warn(clippy::unwrap_used)]
#![deny(clippy::get_unwrap)]
-use std::collections::BTreeMap;
-use std::collections::HashMap;
-use std::collections::VecDeque;
+use std::collections::{BTreeMap, HashMap, VecDeque};
struct GetFalsePositive {
arr: [u32; 3],
diff --git a/src/tools/clippy/tests/ui/get_unwrap.rs b/src/tools/clippy/tests/ui/get_unwrap.rs
index af3a619ad..eaf6b005a 100644
--- a/src/tools/clippy/tests/ui/get_unwrap.rs
+++ b/src/tools/clippy/tests/ui/get_unwrap.rs
@@ -9,9 +9,7 @@
#![warn(clippy::unwrap_used)]
#![deny(clippy::get_unwrap)]
-use std::collections::BTreeMap;
-use std::collections::HashMap;
-use std::collections::VecDeque;
+use std::collections::{BTreeMap, HashMap, VecDeque};
struct GetFalsePositive {
arr: [u32; 3],
diff --git a/src/tools/clippy/tests/ui/get_unwrap.stderr b/src/tools/clippy/tests/ui/get_unwrap.stderr
index fd961420d..19dc9071f 100644
--- a/src/tools/clippy/tests/ui/get_unwrap.stderr
+++ b/src/tools/clippy/tests/ui/get_unwrap.stderr
@@ -1,8 +1,8 @@
error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:40:17
+ --> $DIR/get_unwrap.rs:38:17
|
LL | let _ = boxed_slice.get(1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&boxed_slice[1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&boxed_slice[1]`
|
note: the lint level is defined here
--> $DIR/get_unwrap.rs:10:9
@@ -11,205 +11,218 @@ LL | #![deny(clippy::get_unwrap)]
| ^^^^^^^^^^^^^^^^^^
error: used `unwrap()` on an `Option` value
- --> $DIR/get_unwrap.rs:40:17
+ --> $DIR/get_unwrap.rs:38:17
|
LL | let _ = boxed_slice.get(1).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
= note: `-D clippy::unwrap-used` implied by `-D warnings`
error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:41:17
+ --> $DIR/get_unwrap.rs:39:17
|
LL | let _ = some_slice.get(0).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_slice[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_slice[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/get_unwrap.rs:41:17
+ --> $DIR/get_unwrap.rs:39:17
|
LL | let _ = some_slice.get(0).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a Vec. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:42:17
+ --> $DIR/get_unwrap.rs:40:17
|
LL | let _ = some_vec.get(0).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_vec[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_vec[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/get_unwrap.rs:42:17
+ --> $DIR/get_unwrap.rs:40:17
|
LL | let _ = some_vec.get(0).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a VecDeque. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:43:17
+ --> $DIR/get_unwrap.rs:41:17
|
LL | let _ = some_vecdeque.get(0).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_vecdeque[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_vecdeque[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/get_unwrap.rs:43:17
+ --> $DIR/get_unwrap.rs:41:17
|
LL | let _ = some_vecdeque.get(0).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a HashMap. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:44:17
+ --> $DIR/get_unwrap.rs:42:17
|
LL | let _ = some_hashmap.get(&1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_hashmap[&1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_hashmap[&1]`
error: used `unwrap()` on an `Option` value
- --> $DIR/get_unwrap.rs:44:17
+ --> $DIR/get_unwrap.rs:42:17
|
LL | let _ = some_hashmap.get(&1).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a BTreeMap. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:45:17
+ --> $DIR/get_unwrap.rs:43:17
|
LL | let _ = some_btreemap.get(&1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_btreemap[&1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_btreemap[&1]`
error: used `unwrap()` on an `Option` value
- --> $DIR/get_unwrap.rs:45:17
+ --> $DIR/get_unwrap.rs:43:17
|
LL | let _ = some_btreemap.get(&1).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:49:21
+ --> $DIR/get_unwrap.rs:47:21
|
LL | let _: u8 = *boxed_slice.get(1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `boxed_slice[1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `boxed_slice[1]`
error: used `unwrap()` on an `Option` value
- --> $DIR/get_unwrap.rs:49:22
+ --> $DIR/get_unwrap.rs:47:22
|
LL | let _: u8 = *boxed_slice.get(1).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get_mut().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:54:9
+ --> $DIR/get_unwrap.rs:52:9
|
LL | *boxed_slice.get_mut(0).unwrap() = 1;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `boxed_slice[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `boxed_slice[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/get_unwrap.rs:54:10
+ --> $DIR/get_unwrap.rs:52:10
|
LL | *boxed_slice.get_mut(0).unwrap() = 1;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get_mut().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:55:9
+ --> $DIR/get_unwrap.rs:53:9
|
LL | *some_slice.get_mut(0).unwrap() = 1;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_slice[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_slice[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/get_unwrap.rs:55:10
+ --> $DIR/get_unwrap.rs:53:10
|
LL | *some_slice.get_mut(0).unwrap() = 1;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get_mut().unwrap()` on a Vec. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:56:9
+ --> $DIR/get_unwrap.rs:54:9
|
LL | *some_vec.get_mut(0).unwrap() = 1;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vec[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/get_unwrap.rs:56:10
+ --> $DIR/get_unwrap.rs:54:10
|
LL | *some_vec.get_mut(0).unwrap() = 1;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get_mut().unwrap()` on a VecDeque. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:57:9
+ --> $DIR/get_unwrap.rs:55:9
|
LL | *some_vecdeque.get_mut(0).unwrap() = 1;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vecdeque[0]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vecdeque[0]`
error: used `unwrap()` on an `Option` value
- --> $DIR/get_unwrap.rs:57:10
+ --> $DIR/get_unwrap.rs:55:10
|
LL | *some_vecdeque.get_mut(0).unwrap() = 1;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a Vec. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:69:17
+ --> $DIR/get_unwrap.rs:67:17
|
LL | let _ = some_vec.get(0..1).unwrap().to_vec();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0..1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vec[0..1]`
error: used `unwrap()` on an `Option` value
- --> $DIR/get_unwrap.rs:69:17
+ --> $DIR/get_unwrap.rs:67:17
|
LL | let _ = some_vec.get(0..1).unwrap().to_vec();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get_mut().unwrap()` on a Vec. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:70:17
+ --> $DIR/get_unwrap.rs:68:17
|
LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0..1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vec[0..1]`
error: used `unwrap()` on an `Option` value
- --> $DIR/get_unwrap.rs:70:17
+ --> $DIR/get_unwrap.rs:68:17
|
LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:80:24
+ --> $DIR/get_unwrap.rs:78:24
|
LL | let _x: &i32 = f.get(1 + 2).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `&f[1 + 2]`
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `&f[1 + 2]`
error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:83:18
+ --> $DIR/get_unwrap.rs:81:18
|
LL | let _x = f.get(1 + 2).unwrap().to_string();
- | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `f[1 + 2]`
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `f[1 + 2]`
error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:86:18
+ --> $DIR/get_unwrap.rs:84:18
|
LL | let _x = f.get(1 + 2).unwrap().abs();
- | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `f[1 + 2]`
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `f[1 + 2]`
error: called `.get_mut().unwrap()` on a slice. Using `[]` is more clear and more concise
- --> $DIR/get_unwrap.rs:103:33
+ --> $DIR/get_unwrap.rs:101:33
|
LL | let b = rest.get_mut(linidx(j, k) - linidx(i, k) - 1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&mut rest[linidx(j, k) - linidx(i, k) - 1]`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut rest[linidx(j, k) - linidx(i, k) - 1]`
error: aborting due to 30 previous errors
diff --git a/src/tools/clippy/tests/ui/if_same_then_else.rs b/src/tools/clippy/tests/ui/if_same_then_else.rs
index dad4543f8..e84b20e9f 100644
--- a/src/tools/clippy/tests/ui/if_same_then_else.rs
+++ b/src/tools/clippy/tests/ui/if_same_then_else.rs
@@ -214,4 +214,45 @@ mod issue_8836 {
}
}
+mod issue_11213 {
+ fn reproducer(x: bool) -> bool {
+ if x {
+ 0_u8.is_power_of_two()
+ } else {
+ 0_u16.is_power_of_two()
+ }
+ }
+
+ // a more obvious reproducer that shows
+ // why the code above is problematic:
+ fn v2(x: bool) -> bool {
+ trait Helper {
+ fn is_u8(&self) -> bool;
+ }
+ impl Helper for u8 {
+ fn is_u8(&self) -> bool {
+ true
+ }
+ }
+ impl Helper for u16 {
+ fn is_u8(&self) -> bool {
+ false
+ }
+ }
+
+ // this is certainly not the same code in both branches
+ // it returns a different bool depending on the branch.
+ if x { 0_u8.is_u8() } else { 0_u16.is_u8() }
+ }
+
+ fn do_lint(x: bool) -> bool {
+ // but do lint if the type of the literal is the same
+ if x {
+ 0_u8.is_power_of_two()
+ } else {
+ 0_u8.is_power_of_two()
+ }
+ }
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/if_same_then_else.stderr b/src/tools/clippy/tests/ui/if_same_then_else.stderr
index a34fc5655..774cc0868 100644
--- a/src/tools/clippy/tests/ui/if_same_then_else.stderr
+++ b/src/tools/clippy/tests/ui/if_same_then_else.stderr
@@ -108,5 +108,23 @@ LL | | bar + 1;
LL | | }
| |_____^
-error: aborting due to 5 previous errors
+error: this `if` has identical blocks
+ --> $DIR/if_same_then_else.rs:250:14
+ |
+LL | if x {
+ | ______________^
+LL | | 0_u8.is_power_of_two()
+LL | | } else {
+ | |_________^
+ |
+note: same as this
+ --> $DIR/if_same_then_else.rs:252:16
+ |
+LL | } else {
+ | ________________^
+LL | | 0_u8.is_power_of_two()
+LL | | }
+ | |_________^
+
+error: aborting due to 6 previous errors
diff --git a/src/tools/clippy/tests/ui/if_same_then_else2.rs b/src/tools/clippy/tests/ui/if_same_then_else2.rs
index 0b171f21d..c545434ef 100644
--- a/src/tools/clippy/tests/ui/if_same_then_else2.rs
+++ b/src/tools/clippy/tests/ui/if_same_then_else2.rs
@@ -98,7 +98,7 @@ fn if_same_then_else2() -> Result<&'static str, ()> {
};
if true {
- //~^ ERROR: this `if` has identical blocks
+ // FIXME: should emit "this `if` has identical blocks"
Ok("foo")?;
} else {
Ok("foo")?;
diff --git a/src/tools/clippy/tests/ui/if_same_then_else2.stderr b/src/tools/clippy/tests/ui/if_same_then_else2.stderr
index 56e5f3e45..37fe787d1 100644
--- a/src/tools/clippy/tests/ui/if_same_then_else2.stderr
+++ b/src/tools/clippy/tests/ui/if_same_then_else2.stderr
@@ -83,25 +83,6 @@ LL | | };
| |_____^
error: this `if` has identical blocks
- --> $DIR/if_same_then_else2.rs:100:13
- |
-LL | if true {
- | _____________^
-LL | |
-LL | | Ok("foo")?;
-LL | | } else {
- | |_____^
- |
-note: same as this
- --> $DIR/if_same_then_else2.rs:103:12
- |
-LL | } else {
- | ____________^
-LL | | Ok("foo")?;
-LL | | }
- | |_____^
-
-error: this `if` has identical blocks
--> $DIR/if_same_then_else2.rs:124:20
|
LL | } else if true {
@@ -122,5 +103,5 @@ LL | | return Ok(&foo[0..]);
LL | | }
| |_____^
-error: aborting due to 6 previous errors
+error: aborting due to 5 previous errors
diff --git a/src/tools/clippy/tests/ui/ifs_same_cond.rs b/src/tools/clippy/tests/ui/ifs_same_cond.rs
index 5c338e3c5..ad77346b7 100644
--- a/src/tools/clippy/tests/ui/ifs_same_cond.rs
+++ b/src/tools/clippy/tests/ui/ifs_same_cond.rs
@@ -46,6 +46,10 @@ fn ifs_same_cond() {
// ok, functions
} else if v.len() == 42 {
}
+
+ if let Some(env1) = option_env!("ENV1") {
+ } else if let Some(env2) = option_env!("ENV2") {
+ }
}
fn issue10272() {
diff --git a/src/tools/clippy/tests/ui/ifs_same_cond.stderr b/src/tools/clippy/tests/ui/ifs_same_cond.stderr
index 8d7093447..3f52c10b7 100644
--- a/src/tools/clippy/tests/ui/ifs_same_cond.stderr
+++ b/src/tools/clippy/tests/ui/ifs_same_cond.stderr
@@ -36,13 +36,13 @@ LL | if 2 * a == 1 {
| ^^^^^^^^^^
error: this `if` has the same condition as a previous `if`
- --> $DIR/ifs_same_cond.rs:54:15
+ --> $DIR/ifs_same_cond.rs:58:15
|
LL | } else if a.contains("ah") {
| ^^^^^^^^^^^^^^^^
|
note: same as this
- --> $DIR/ifs_same_cond.rs:53:8
+ --> $DIR/ifs_same_cond.rs:57:8
|
LL | if a.contains("ah") {
| ^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/ignored_unit_patterns.fixed b/src/tools/clippy/tests/ui/ignored_unit_patterns.fixed
new file mode 100644
index 000000000..492219fe4
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ignored_unit_patterns.fixed
@@ -0,0 +1,17 @@
+//@run-rustfix
+
+#![warn(clippy::ignored_unit_patterns)]
+#![allow(clippy::redundant_pattern_matching, clippy::single_match)]
+
+fn foo() -> Result<(), ()> {
+ unimplemented!()
+}
+
+fn main() {
+ match foo() {
+ Ok(()) => {},
+ Err(()) => {},
+ }
+ if let Ok(()) = foo() {}
+ let _ = foo().map_err(|()| todo!());
+}
diff --git a/src/tools/clippy/tests/ui/ignored_unit_patterns.rs b/src/tools/clippy/tests/ui/ignored_unit_patterns.rs
new file mode 100644
index 000000000..90af36f8e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ignored_unit_patterns.rs
@@ -0,0 +1,17 @@
+//@run-rustfix
+
+#![warn(clippy::ignored_unit_patterns)]
+#![allow(clippy::redundant_pattern_matching, clippy::single_match)]
+
+fn foo() -> Result<(), ()> {
+ unimplemented!()
+}
+
+fn main() {
+ match foo() {
+ Ok(_) => {},
+ Err(_) => {},
+ }
+ if let Ok(_) = foo() {}
+ let _ = foo().map_err(|_| todo!());
+}
diff --git a/src/tools/clippy/tests/ui/ignored_unit_patterns.stderr b/src/tools/clippy/tests/ui/ignored_unit_patterns.stderr
new file mode 100644
index 000000000..8feea3cc2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/ignored_unit_patterns.stderr
@@ -0,0 +1,28 @@
+error: matching over `()` is more explicit
+ --> $DIR/ignored_unit_patterns.rs:12:12
+ |
+LL | Ok(_) => {},
+ | ^ help: use `()` instead of `_`: `()`
+ |
+ = note: `-D clippy::ignored-unit-patterns` implied by `-D warnings`
+
+error: matching over `()` is more explicit
+ --> $DIR/ignored_unit_patterns.rs:13:13
+ |
+LL | Err(_) => {},
+ | ^ help: use `()` instead of `_`: `()`
+
+error: matching over `()` is more explicit
+ --> $DIR/ignored_unit_patterns.rs:15:15
+ |
+LL | if let Ok(_) = foo() {}
+ | ^ help: use `()` instead of `_`: `()`
+
+error: matching over `()` is more explicit
+ --> $DIR/ignored_unit_patterns.rs:16:28
+ |
+LL | let _ = foo().map_err(|_| todo!());
+ | ^ help: use `()` instead of `_`: `()`
+
+error: aborting due to 4 previous errors
+
diff --git a/src/tools/clippy/tests/ui/incorrect_clone_impl_on_copy_type.stderr b/src/tools/clippy/tests/ui/incorrect_clone_impl_on_copy_type.stderr
index 0021841aa..7bcba8ba4 100644
--- a/src/tools/clippy/tests/ui/incorrect_clone_impl_on_copy_type.stderr
+++ b/src/tools/clippy/tests/ui/incorrect_clone_impl_on_copy_type.stderr
@@ -16,7 +16,7 @@ LL | / fn clone_from(&mut self, source: &Self) {
LL | | source.clone();
LL | | *self = source.clone();
LL | | }
- | |_____^ help: remove this
+ | |_____^ help: remove it
error: incorrect implementation of `clone` on a `Copy` type
--> $DIR/incorrect_clone_impl_on_copy_type.rs:81:29
@@ -34,7 +34,7 @@ LL | / fn clone_from(&mut self, source: &Self) {
LL | | source.clone();
LL | | *self = source.clone();
LL | | }
- | |_____^ help: remove this
+ | |_____^ help: remove it
error: aborting due to 4 previous errors
diff --git a/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.fixed b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.fixed
new file mode 100644
index 000000000..2f51bf274
--- /dev/null
+++ b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.fixed
@@ -0,0 +1,145 @@
+//@run-rustfix
+#![no_main]
+
+use std::cmp::Ordering;
+
+// lint
+
+#[derive(Eq, PartialEq)]
+struct A(u32);
+
+impl Ord for A {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for A {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) }
+}
+
+// do not lint
+
+#[derive(Eq, PartialEq)]
+struct B(u32);
+
+impl Ord for B {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for B {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+// lint, and give `_` a name
+
+#[derive(Eq, PartialEq)]
+struct C(u32);
+
+impl Ord for C {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for C {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) }
+}
+
+// do not lint derived
+
+#[derive(Eq, Ord, PartialEq, PartialOrd)]
+struct D(u32);
+
+// do not lint if ord is not manually implemented
+
+#[derive(Eq, PartialEq)]
+struct E(u32);
+
+impl PartialOrd for E {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ todo!();
+ }
+}
+
+// do not lint since ord has more restrictive bounds
+
+#[derive(Eq, PartialEq)]
+struct Uwu<A>(A);
+
+impl<A: std::fmt::Debug + Ord + PartialOrd> Ord for Uwu<A> {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl<A: Ord + PartialOrd> PartialOrd for Uwu<A> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ todo!();
+ }
+}
+
+// do not lint since `Rhs` is not `Self`
+
+#[derive(Eq, PartialEq)]
+struct F(u32);
+
+impl Ord for F {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for F {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl PartialEq<u32> for F {
+ fn eq(&self, other: &u32) -> bool {
+ todo!();
+ }
+}
+
+impl PartialOrd<u32> for F {
+ fn partial_cmp(&self, other: &u32) -> Option<Ordering> {
+ todo!();
+ }
+}
+
+// #11178, do not lint
+
+#[derive(Eq, PartialEq)]
+struct G(u32);
+
+impl Ord for G {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for G {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(Self::cmp(self, other))
+ }
+}
+
+#[derive(Eq, PartialEq)]
+struct H(u32);
+
+impl Ord for H {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for H {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(Ord::cmp(self, other))
+ }
+}
diff --git a/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.rs b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.rs
new file mode 100644
index 000000000..47127bdae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.rs
@@ -0,0 +1,149 @@
+//@run-rustfix
+#![no_main]
+
+use std::cmp::Ordering;
+
+// lint
+
+#[derive(Eq, PartialEq)]
+struct A(u32);
+
+impl Ord for A {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for A {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ todo!();
+ }
+}
+
+// do not lint
+
+#[derive(Eq, PartialEq)]
+struct B(u32);
+
+impl Ord for B {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for B {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+// lint, and give `_` a name
+
+#[derive(Eq, PartialEq)]
+struct C(u32);
+
+impl Ord for C {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for C {
+ fn partial_cmp(&self, _: &Self) -> Option<Ordering> {
+ todo!();
+ }
+}
+
+// do not lint derived
+
+#[derive(Eq, Ord, PartialEq, PartialOrd)]
+struct D(u32);
+
+// do not lint if ord is not manually implemented
+
+#[derive(Eq, PartialEq)]
+struct E(u32);
+
+impl PartialOrd for E {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ todo!();
+ }
+}
+
+// do not lint since ord has more restrictive bounds
+
+#[derive(Eq, PartialEq)]
+struct Uwu<A>(A);
+
+impl<A: std::fmt::Debug + Ord + PartialOrd> Ord for Uwu<A> {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl<A: Ord + PartialOrd> PartialOrd for Uwu<A> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ todo!();
+ }
+}
+
+// do not lint since `Rhs` is not `Self`
+
+#[derive(Eq, PartialEq)]
+struct F(u32);
+
+impl Ord for F {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for F {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl PartialEq<u32> for F {
+ fn eq(&self, other: &u32) -> bool {
+ todo!();
+ }
+}
+
+impl PartialOrd<u32> for F {
+ fn partial_cmp(&self, other: &u32) -> Option<Ordering> {
+ todo!();
+ }
+}
+
+// #11178, do not lint
+
+#[derive(Eq, PartialEq)]
+struct G(u32);
+
+impl Ord for G {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for G {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(Self::cmp(self, other))
+ }
+}
+
+#[derive(Eq, PartialEq)]
+struct H(u32);
+
+impl Ord for H {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for H {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(Ord::cmp(self, other))
+ }
+}
diff --git a/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.stderr b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.stderr
new file mode 100644
index 000000000..66048fc90
--- /dev/null
+++ b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.stderr
@@ -0,0 +1,31 @@
+error: incorrect implementation of `partial_cmp` on an `Ord` type
+ --> $DIR/incorrect_partial_ord_impl_on_ord_type.rs:17:1
+ |
+LL | / impl PartialOrd for A {
+LL | | fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ | | _____________________________________________________________-
+LL | || todo!();
+LL | || }
+ | ||_____- help: change this to: `{ Some(self.cmp(other)) }`
+LL | | }
+ | |__^
+ |
+ = note: `#[deny(clippy::incorrect_partial_ord_impl_on_ord_type)]` on by default
+
+error: incorrect implementation of `partial_cmp` on an `Ord` type
+ --> $DIR/incorrect_partial_ord_impl_on_ord_type.rs:51:1
+ |
+LL | / impl PartialOrd for C {
+LL | | fn partial_cmp(&self, _: &Self) -> Option<Ordering> {
+LL | | todo!();
+LL | | }
+LL | | }
+ | |_^
+ |
+help: change this to
+ |
+LL | fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) }
+ | ~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type_fully_qual.rs b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type_fully_qual.rs
new file mode 100644
index 000000000..3a3b84f93
--- /dev/null
+++ b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type_fully_qual.rs
@@ -0,0 +1,51 @@
+// This test's filename is... a bit verbose. But it ensures we suggest the correct code when `Ord`
+// is not in scope.
+#![no_main]
+#![no_implicit_prelude]
+
+extern crate std;
+
+use std::cmp::{self, Eq, Ordering, PartialEq, PartialOrd};
+use std::option::Option::{self, Some};
+use std::todo;
+
+// lint
+
+#[derive(Eq, PartialEq)]
+struct A(u32);
+
+impl cmp::Ord for A {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for A {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ // NOTE: This suggestion is wrong, as `Ord` is not in scope. But this should be fine as it isn't
+ // automatically applied
+ todo!();
+ }
+}
+
+#[derive(Eq, PartialEq)]
+struct B(u32);
+
+impl B {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl cmp::Ord for B {
+ fn cmp(&self, other: &Self) -> Ordering {
+ todo!();
+ }
+}
+
+impl PartialOrd for B {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ // This calls `B.cmp`, not `Ord::cmp`!
+ Some(self.cmp(other))
+ }
+}
diff --git a/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type_fully_qual.stderr b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type_fully_qual.stderr
new file mode 100644
index 000000000..f4374c281
--- /dev/null
+++ b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type_fully_qual.stderr
@@ -0,0 +1,31 @@
+error: incorrect implementation of `partial_cmp` on an `Ord` type
+ --> $DIR/incorrect_partial_ord_impl_on_ord_type_fully_qual.rs:23:1
+ |
+LL | / impl PartialOrd for A {
+LL | | fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ | | _____________________________________________________________-
+LL | || // NOTE: This suggestion is wrong, as `Ord` is not in scope. But this should be fine as it isn't
+LL | || // automatically applied
+LL | || todo!();
+LL | || }
+ | ||_____- help: change this to: `{ Some(self.cmp(other)) }`
+LL | | }
+ | |__^
+ |
+ = note: `#[deny(clippy::incorrect_partial_ord_impl_on_ord_type)]` on by default
+
+error: incorrect implementation of `partial_cmp` on an `Ord` type
+ --> $DIR/incorrect_partial_ord_impl_on_ord_type_fully_qual.rs:46:1
+ |
+LL | / impl PartialOrd for B {
+LL | | fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ | | _____________________________________________________________-
+LL | || // This calls `B.cmp`, not `Ord::cmp`!
+LL | || Some(self.cmp(other))
+LL | || }
+ | ||_____- help: change this to: `{ Some(std::cmp::Ord::cmp(self, other)) }`
+LL | | }
+ | |__^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/infallible_destructuring_match.stderr b/src/tools/clippy/tests/ui/infallible_destructuring_match.stderr
index f8a50f022..004260a1d 100644
--- a/src/tools/clippy/tests/ui/infallible_destructuring_match.stderr
+++ b/src/tools/clippy/tests/ui/infallible_destructuring_match.stderr
@@ -4,7 +4,7 @@ error: you seem to be trying to use `match` to destructure a single infallible p
LL | / let data = match wrapper {
LL | | SingleVariantEnum::Variant(i) => i,
LL | | };
- | |______^ help: try this: `let SingleVariantEnum::Variant(data) = wrapper;`
+ | |______^ help: try: `let SingleVariantEnum::Variant(data) = wrapper;`
|
= note: `-D clippy::infallible-destructuring-match` implied by `-D warnings`
@@ -14,7 +14,7 @@ error: you seem to be trying to use `match` to destructure a single infallible p
LL | / let data = match wrapper {
LL | | TupleStruct(i) => i,
LL | | };
- | |______^ help: try this: `let TupleStruct(data) = wrapper;`
+ | |______^ help: try: `let TupleStruct(data) = wrapper;`
error: you seem to be trying to use `match` to destructure a single infallible pattern. Consider using `let`
--> $DIR/infallible_destructuring_match.rs:85:5
@@ -22,7 +22,7 @@ error: you seem to be trying to use `match` to destructure a single infallible p
LL | / let data = match wrapper {
LL | | TupleStructWithNonCopy(ref n) => n,
LL | | };
- | |______^ help: try this: `let TupleStructWithNonCopy(ref data) = wrapper;`
+ | |______^ help: try: `let TupleStructWithNonCopy(ref data) = wrapper;`
error: you seem to be trying to use `match` to destructure a single infallible pattern. Consider using `let`
--> $DIR/infallible_destructuring_match.rs:104:5
@@ -30,7 +30,7 @@ error: you seem to be trying to use `match` to destructure a single infallible p
LL | / let data = match wrapper {
LL | | Ok(i) => i,
LL | | };
- | |______^ help: try this: `let Ok(data) = wrapper;`
+ | |______^ help: try: `let Ok(data) = wrapper;`
error: aborting due to 4 previous errors
diff --git a/src/tools/clippy/tests/ui/inherent_to_string.rs b/src/tools/clippy/tests/ui/inherent_to_string.rs
index aeb0a0c1e..adb0389a0 100644
--- a/src/tools/clippy/tests/ui/inherent_to_string.rs
+++ b/src/tools/clippy/tests/ui/inherent_to_string.rs
@@ -1,5 +1,4 @@
-#![warn(clippy::inherent_to_string)]
-#![deny(clippy::inherent_to_string_shadow_display)]
+#![allow(improper_ctypes_definitions)]
use std::fmt;
@@ -14,6 +13,9 @@ struct D;
struct E;
struct F;
struct G;
+struct H;
+struct I;
+struct J;
impl A {
// Should be detected; emit warning
@@ -80,6 +82,26 @@ impl G {
}
}
+// Issue #11201
+
+impl H {
+ unsafe fn to_string(&self) -> String {
+ "G.to_string()".to_string()
+ }
+}
+
+impl I {
+ extern "C" fn to_string(&self) -> String {
+ "G.to_string()".to_string()
+ }
+}
+
+impl J {
+ unsafe extern "C" fn to_string(&self) -> String {
+ "G.to_string()".to_string()
+ }
+}
+
fn main() {
let a = A;
a.to_string();
diff --git a/src/tools/clippy/tests/ui/inherent_to_string.stderr b/src/tools/clippy/tests/ui/inherent_to_string.stderr
index 443fecae1..579b3c8c5 100644
--- a/src/tools/clippy/tests/ui/inherent_to_string.stderr
+++ b/src/tools/clippy/tests/ui/inherent_to_string.stderr
@@ -1,5 +1,5 @@
error: implementation of inherent method `to_string(&self) -> String` for type `A`
- --> $DIR/inherent_to_string.rs:20:5
+ --> $DIR/inherent_to_string.rs:22:5
|
LL | / fn to_string(&self) -> String {
LL | | "A.to_string()".to_string()
@@ -10,7 +10,7 @@ LL | | }
= note: `-D clippy::inherent-to-string` implied by `-D warnings`
error: type `C` implements inherent method `to_string(&self) -> String` which shadows the implementation of `Display`
- --> $DIR/inherent_to_string.rs:44:5
+ --> $DIR/inherent_to_string.rs:46:5
|
LL | / fn to_string(&self) -> String {
LL | | "C.to_string()".to_string()
@@ -18,11 +18,7 @@ LL | | }
| |_____^
|
= help: remove the inherent method from type `C`
-note: the lint level is defined here
- --> $DIR/inherent_to_string.rs:2:9
- |
-LL | #![deny(clippy::inherent_to_string_shadow_display)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = note: `#[deny(clippy::inherent_to_string_shadow_display)]` on by default
error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui/issue-7447.rs b/src/tools/clippy/tests/ui/issue-7447.rs
index fdb77f322..de4362c4d 100644
--- a/src/tools/clippy/tests/ui/issue-7447.rs
+++ b/src/tools/clippy/tests/ui/issue-7447.rs
@@ -1,4 +1,7 @@
-use std::{borrow::Cow, collections::BTreeMap, marker::PhantomData, sync::Arc};
+use std::borrow::Cow;
+use std::collections::BTreeMap;
+use std::marker::PhantomData;
+use std::sync::Arc;
fn byte_view<'a>(s: &'a ByteView<'_>) -> BTreeMap<&'a str, ByteView<'a>> {
panic!()
diff --git a/src/tools/clippy/tests/ui/issue-7447.stderr b/src/tools/clippy/tests/ui/issue-7447.stderr
index 8d8c29f13..7a113740c 100644
--- a/src/tools/clippy/tests/ui/issue-7447.stderr
+++ b/src/tools/clippy/tests/ui/issue-7447.stderr
@@ -1,5 +1,5 @@
error: sub-expression diverges
- --> $DIR/issue-7447.rs:23:15
+ --> $DIR/issue-7447.rs:26:15
|
LL | byte_view(panic!());
| ^^^^^^^^
@@ -8,7 +8,7 @@ LL | byte_view(panic!());
= note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info)
error: sub-expression diverges
- --> $DIR/issue-7447.rs:24:19
+ --> $DIR/issue-7447.rs:27:19
|
LL | group_entries(panic!());
| ^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/iter_cloned_collect.fixed b/src/tools/clippy/tests/ui/iter_cloned_collect.fixed
index 2baea06f8..636f572a3 100644
--- a/src/tools/clippy/tests/ui/iter_cloned_collect.fixed
+++ b/src/tools/clippy/tests/ui/iter_cloned_collect.fixed
@@ -3,8 +3,7 @@
#![allow(unused)]
#![allow(clippy::useless_vec)]
-use std::collections::HashSet;
-use std::collections::VecDeque;
+use std::collections::{HashSet, VecDeque};
fn main() {
let v = [1, 2, 3, 4, 5];
diff --git a/src/tools/clippy/tests/ui/iter_cloned_collect.rs b/src/tools/clippy/tests/ui/iter_cloned_collect.rs
index 9eac94eb8..518cb75af 100644
--- a/src/tools/clippy/tests/ui/iter_cloned_collect.rs
+++ b/src/tools/clippy/tests/ui/iter_cloned_collect.rs
@@ -3,8 +3,7 @@
#![allow(unused)]
#![allow(clippy::useless_vec)]
-use std::collections::HashSet;
-use std::collections::VecDeque;
+use std::collections::{HashSet, VecDeque};
fn main() {
let v = [1, 2, 3, 4, 5];
diff --git a/src/tools/clippy/tests/ui/iter_cloned_collect.stderr b/src/tools/clippy/tests/ui/iter_cloned_collect.stderr
index b38cf547d..b2cc497bf 100644
--- a/src/tools/clippy/tests/ui/iter_cloned_collect.stderr
+++ b/src/tools/clippy/tests/ui/iter_cloned_collect.stderr
@@ -1,5 +1,5 @@
error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable
- --> $DIR/iter_cloned_collect.rs:11:27
+ --> $DIR/iter_cloned_collect.rs:10:27
|
LL | let v2: Vec<isize> = v.iter().cloned().collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()`
@@ -7,13 +7,13 @@ LL | let v2: Vec<isize> = v.iter().cloned().collect();
= note: `-D clippy::iter-cloned-collect` implied by `-D warnings`
error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable
- --> $DIR/iter_cloned_collect.rs:16:38
+ --> $DIR/iter_cloned_collect.rs:15:38
|
LL | let _: Vec<isize> = vec![1, 2, 3].iter().cloned().collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()`
error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable
- --> $DIR/iter_cloned_collect.rs:21:24
+ --> $DIR/iter_cloned_collect.rs:20:24
|
LL | .to_bytes()
| ________________________^
@@ -23,13 +23,13 @@ LL | | .collect();
| |______________________^ help: try: `.to_vec()`
error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable
- --> $DIR/iter_cloned_collect.rs:29:24
+ --> $DIR/iter_cloned_collect.rs:28:24
|
LL | let _: Vec<_> = arr.iter().cloned().collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()`
error: called `iter().copied().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable
- --> $DIR/iter_cloned_collect.rs:32:26
+ --> $DIR/iter_cloned_collect.rs:31:26
|
LL | let _: Vec<isize> = v.iter().copied().collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()`
diff --git a/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr b/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr
index dcae7cecd..eaac48be8 100644
--- a/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr
+++ b/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr
@@ -4,7 +4,7 @@ error: unnecessarily eager cloning of iterator items
LL | let _: Option<String> = vec.iter().cloned().last();
| ^^^^^^^^^^----------------
| |
- | help: try this: `.last().cloned()`
+ | help: try: `.last().cloned()`
|
= note: `-D clippy::iter-overeager-cloned` implied by `-D warnings`
@@ -14,7 +14,7 @@ error: unnecessarily eager cloning of iterator items
LL | let _: Option<String> = vec.iter().chain(vec.iter()).cloned().next();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^----------------
| |
- | help: try this: `.next().cloned()`
+ | help: try: `.next().cloned()`
error: unneeded cloning of iterator items
--> $DIR/iter_overeager_cloned.rs:12:20
@@ -22,7 +22,7 @@ error: unneeded cloning of iterator items
LL | let _: usize = vec.iter().filter(|x| x == &"2").cloned().count();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-----------------
| |
- | help: try this: `.count()`
+ | help: try: `.count()`
|
= note: `-D clippy::redundant-clone` implied by `-D warnings`
@@ -32,7 +32,7 @@ error: unnecessarily eager cloning of iterator items
LL | let _: Vec<_> = vec.iter().cloned().take(2).collect();
| ^^^^^^^^^^-----------------
| |
- | help: try this: `.take(2).cloned()`
+ | help: try: `.take(2).cloned()`
error: unnecessarily eager cloning of iterator items
--> $DIR/iter_overeager_cloned.rs:16:21
@@ -40,7 +40,7 @@ error: unnecessarily eager cloning of iterator items
LL | let _: Vec<_> = vec.iter().cloned().skip(2).collect();
| ^^^^^^^^^^-----------------
| |
- | help: try this: `.skip(2).cloned()`
+ | help: try: `.skip(2).cloned()`
error: unnecessarily eager cloning of iterator items
--> $DIR/iter_overeager_cloned.rs:18:13
@@ -48,7 +48,7 @@ error: unnecessarily eager cloning of iterator items
LL | let _ = vec.iter().filter(|x| x == &"2").cloned().nth(2);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^----------------
| |
- | help: try this: `.nth(2).cloned()`
+ | help: try: `.nth(2).cloned()`
error: unnecessarily eager cloning of iterator items
--> $DIR/iter_overeager_cloned.rs:20:13
@@ -60,7 +60,7 @@ LL | | .cloned()
LL | | .flatten();
| |__________________^
|
-help: try this
+help: try
|
LL ~ .iter()
LL ~ .flatten().cloned();
diff --git a/src/tools/clippy/tests/ui/iter_skip_zero.fixed b/src/tools/clippy/tests/ui/iter_skip_zero.fixed
new file mode 100644
index 000000000..1eb0984fe
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_skip_zero.fixed
@@ -0,0 +1,25 @@
+//@run-rustfix
+//@aux-build:proc_macros.rs:proc-macro
+#![allow(clippy::useless_vec, unused)]
+#![warn(clippy::iter_skip_zero)]
+
+#[macro_use]
+extern crate proc_macros;
+
+use std::iter::once;
+
+fn main() {
+ let _ = [1, 2, 3].iter().skip(1);
+ let _ = vec![1, 2, 3].iter().skip(1);
+ let _ = once([1, 2, 3]).skip(1);
+ let _ = vec![1, 2, 3].iter().chain([1, 2, 3].iter().skip(1)).skip(1);
+ // Don't lint
+ let _ = [1, 2, 3].iter().skip(1);
+ let _ = vec![1, 2, 3].iter().skip(1);
+ external! {
+ let _ = [1, 2, 3].iter().skip(0);
+ }
+ with_span! {
+ let _ = [1, 2, 3].iter().skip(0);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/iter_skip_zero.rs b/src/tools/clippy/tests/ui/iter_skip_zero.rs
new file mode 100644
index 000000000..8c103ab1d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_skip_zero.rs
@@ -0,0 +1,25 @@
+//@run-rustfix
+//@aux-build:proc_macros.rs:proc-macro
+#![allow(clippy::useless_vec, unused)]
+#![warn(clippy::iter_skip_zero)]
+
+#[macro_use]
+extern crate proc_macros;
+
+use std::iter::once;
+
+fn main() {
+ let _ = [1, 2, 3].iter().skip(0);
+ let _ = vec![1, 2, 3].iter().skip(0);
+ let _ = once([1, 2, 3]).skip(0);
+ let _ = vec![1, 2, 3].iter().chain([1, 2, 3].iter().skip(0)).skip(0);
+ // Don't lint
+ let _ = [1, 2, 3].iter().skip(1);
+ let _ = vec![1, 2, 3].iter().skip(1);
+ external! {
+ let _ = [1, 2, 3].iter().skip(0);
+ }
+ with_span! {
+ let _ = [1, 2, 3].iter().skip(0);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/iter_skip_zero.stderr b/src/tools/clippy/tests/ui/iter_skip_zero.stderr
new file mode 100644
index 000000000..80fecd59e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/iter_skip_zero.stderr
@@ -0,0 +1,43 @@
+error: usage of `.skip(0)`
+ --> $DIR/iter_skip_zero.rs:12:35
+ |
+LL | let _ = [1, 2, 3].iter().skip(0);
+ | ^ help: if you meant to skip the first element, use: `1`
+ |
+ = note: this call to `skip` does nothing and is useless; remove it
+ = note: `-D clippy::iter-skip-zero` implied by `-D warnings`
+
+error: usage of `.skip(0)`
+ --> $DIR/iter_skip_zero.rs:13:39
+ |
+LL | let _ = vec![1, 2, 3].iter().skip(0);
+ | ^ help: if you meant to skip the first element, use: `1`
+ |
+ = note: this call to `skip` does nothing and is useless; remove it
+
+error: usage of `.skip(0)`
+ --> $DIR/iter_skip_zero.rs:14:34
+ |
+LL | let _ = once([1, 2, 3]).skip(0);
+ | ^ help: if you meant to skip the first element, use: `1`
+ |
+ = note: this call to `skip` does nothing and is useless; remove it
+
+error: usage of `.skip(0)`
+ --> $DIR/iter_skip_zero.rs:15:71
+ |
+LL | let _ = vec![1, 2, 3].iter().chain([1, 2, 3].iter().skip(0)).skip(0);
+ | ^ help: if you meant to skip the first element, use: `1`
+ |
+ = note: this call to `skip` does nothing and is useless; remove it
+
+error: usage of `.skip(0)`
+ --> $DIR/iter_skip_zero.rs:15:62
+ |
+LL | let _ = vec![1, 2, 3].iter().chain([1, 2, 3].iter().skip(0)).skip(0);
+ | ^ help: if you meant to skip the first element, use: `1`
+ |
+ = note: this call to `skip` does nothing and is useless; remove it
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/iter_with_drain.stderr b/src/tools/clippy/tests/ui/iter_with_drain.stderr
index aa394439f..bfaed29a0 100644
--- a/src/tools/clippy/tests/ui/iter_with_drain.stderr
+++ b/src/tools/clippy/tests/ui/iter_with_drain.stderr
@@ -2,7 +2,7 @@ error: `drain(..)` used on a `Vec`
--> $DIR/iter_with_drain.rs:11:34
|
LL | let mut a: BinaryHeap<_> = a.drain(..).collect();
- | ^^^^^^^^^ help: try this: `into_iter()`
+ | ^^^^^^^^^ help: try: `into_iter()`
|
= note: `-D clippy::iter-with-drain` implied by `-D warnings`
@@ -10,31 +10,31 @@ error: `drain(..)` used on a `VecDeque`
--> $DIR/iter_with_drain.rs:14:27
|
LL | let mut a: Vec<_> = a.drain(..).collect();
- | ^^^^^^^^^ help: try this: `into_iter()`
+ | ^^^^^^^^^ help: try: `into_iter()`
error: `drain(..)` used on a `Vec`
--> $DIR/iter_with_drain.rs:15:34
|
LL | let mut a: HashMap<_, _> = a.drain(..).map(|x| (x.clone(), x)).collect();
- | ^^^^^^^^^ help: try this: `into_iter()`
+ | ^^^^^^^^^ help: try: `into_iter()`
error: `drain(..)` used on a `Vec`
--> $DIR/iter_with_drain.rs:21:34
|
LL | let mut a: BinaryHeap<_> = a.drain(0..).collect();
- | ^^^^^^^^^^ help: try this: `into_iter()`
+ | ^^^^^^^^^^ help: try: `into_iter()`
error: `drain(..)` used on a `VecDeque`
--> $DIR/iter_with_drain.rs:24:27
|
LL | let mut a: Vec<_> = a.drain(..a.len()).collect();
- | ^^^^^^^^^^^^^^^^ help: try this: `into_iter()`
+ | ^^^^^^^^^^^^^^^^ help: try: `into_iter()`
error: `drain(..)` used on a `Vec`
--> $DIR/iter_with_drain.rs:25:34
|
LL | let mut a: HashMap<_, _> = a.drain(0..a.len()).map(|x| (x.clone(), x)).collect();
- | ^^^^^^^^^^^^^^^^^ help: try this: `into_iter()`
+ | ^^^^^^^^^^^^^^^^^ help: try: `into_iter()`
error: aborting due to 6 previous errors
diff --git a/src/tools/clippy/tests/ui/let_and_return.rs b/src/tools/clippy/tests/ui/let_and_return.rs
index bb162adc9..64665cc90 100644
--- a/src/tools/clippy/tests/ui/let_and_return.rs
+++ b/src/tools/clippy/tests/ui/let_and_return.rs
@@ -1,6 +1,8 @@
#![allow(unused)]
#![warn(clippy::let_and_return)]
+use std::cell::RefCell;
+
fn test() -> i32 {
let _y = 0; // no warning
let x = 5;
@@ -65,45 +67,46 @@ macro_rules! tuple_encode {
);
}
+fn issue_3792() -> String {
+ use std::io::{self, BufRead, Stdin};
+
+ let stdin = io::stdin();
+ // `Stdin::lock` returns `StdinLock<'static>` so `line` doesn't borrow from `stdin`
+ // https://github.com/rust-lang/rust/pull/93965
+ let line = stdin.lock().lines().next().unwrap().unwrap();
+ line
+}
+
tuple_encode!(T0, T1, T2, T3, T4, T5, T6, T7);
mod no_lint_if_stmt_borrows {
- mod issue_3792 {
- use std::io::{self, BufRead, Stdin};
+ use std::cell::RefCell;
+ use std::rc::{Rc, Weak};
+ struct Bar;
- fn read_line() -> String {
- let stdin = io::stdin();
- let line = stdin.lock().lines().next().unwrap().unwrap();
- line
+ impl Bar {
+ fn new() -> Self {
+ Bar {}
}
- }
-
- mod issue_3324 {
- use std::cell::RefCell;
- use std::rc::{Rc, Weak};
-
- fn test(value: Weak<RefCell<Bar>>) -> u32 {
- let value = value.upgrade().unwrap();
- let ret = value.borrow().baz();
- ret
+ fn baz(&self) -> u32 {
+ 0
}
+ }
- struct Bar;
+ fn issue_3324(value: Weak<RefCell<Bar>>) -> u32 {
+ let value = value.upgrade().unwrap();
+ let ret = value.borrow().baz();
+ ret
+ }
- impl Bar {
- fn new() -> Self {
- Bar {}
- }
- fn baz(&self) -> u32 {
- 0
- }
+ fn borrows_in_closure(value: Weak<RefCell<Bar>>) -> u32 {
+ fn f(mut x: impl FnMut() -> u32) -> impl FnMut() -> u32 {
+ x
}
- fn main() {
- let a = Rc::new(RefCell::new(Bar::new()));
- let b = Rc::downgrade(&a);
- test(b);
- }
+ let value = value.upgrade().unwrap();
+ let ret = f(|| value.borrow().baz())();
+ ret
}
mod free_function {
@@ -166,4 +169,14 @@ mod issue_5729 {
}
}
+// https://github.com/rust-lang/rust-clippy/issues/11167
+macro_rules! fn_in_macro {
+ ($b:block) => {
+ fn f() -> usize $b
+ }
+}
+fn_in_macro!({
+ return 1;
+});
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/let_and_return.stderr b/src/tools/clippy/tests/ui/let_and_return.stderr
index 17fd694bf..4ca0a05c8 100644
--- a/src/tools/clippy/tests/ui/let_and_return.stderr
+++ b/src/tools/clippy/tests/ui/let_and_return.stderr
@@ -1,5 +1,5 @@
error: returning the result of a `let` binding from a block
- --> $DIR/let_and_return.rs:7:5
+ --> $DIR/let_and_return.rs:9:5
|
LL | let x = 5;
| ---------- unnecessary `let` binding
@@ -14,7 +14,7 @@ LL ~ 5
|
error: returning the result of a `let` binding from a block
- --> $DIR/let_and_return.rs:13:9
+ --> $DIR/let_and_return.rs:15:9
|
LL | let x = 5;
| ---------- unnecessary `let` binding
@@ -28,7 +28,21 @@ LL ~ 5
|
error: returning the result of a `let` binding from a block
- --> $DIR/let_and_return.rs:164:13
+ --> $DIR/let_and_return.rs:77:5
+ |
+LL | let line = stdin.lock().lines().next().unwrap().unwrap();
+ | --------------------------------------------------------- unnecessary `let` binding
+LL | line
+ | ^^^^
+ |
+help: return the expression directly
+ |
+LL ~
+LL ~ stdin.lock().lines().next().unwrap().unwrap()
+ |
+
+error: returning the result of a `let` binding from a block
+ --> $DIR/let_and_return.rs:167:13
|
LL | let clone = Arc::clone(&self.foo);
| ---------------------------------- unnecessary `let` binding
@@ -41,5 +55,5 @@ LL ~
LL ~ Arc::clone(&self.foo) as _
|
-error: aborting due to 3 previous errors
+error: aborting due to 4 previous errors
diff --git a/src/tools/clippy/tests/ui/let_underscore_lock.rs b/src/tools/clippy/tests/ui/let_underscore_lock.rs
index 4dff4d766..87f12e278 100644
--- a/src/tools/clippy/tests/ui/let_underscore_lock.rs
+++ b/src/tools/clippy/tests/ui/let_underscore_lock.rs
@@ -3,7 +3,8 @@
extern crate parking_lot;
fn main() {
- use parking_lot::{lock_api::RawMutex, Mutex, RwLock};
+ use parking_lot::lock_api::RawMutex;
+ use parking_lot::{Mutex, RwLock};
let p_m: Mutex<()> = Mutex::const_new(RawMutex::INIT, ());
let _ = p_m.lock();
diff --git a/src/tools/clippy/tests/ui/let_underscore_lock.stderr b/src/tools/clippy/tests/ui/let_underscore_lock.stderr
index f137d4112..5027e6b3c 100644
--- a/src/tools/clippy/tests/ui/let_underscore_lock.stderr
+++ b/src/tools/clippy/tests/ui/let_underscore_lock.stderr
@@ -1,5 +1,5 @@
error: non-binding `let` on a synchronization lock
- --> $DIR/let_underscore_lock.rs:9:5
+ --> $DIR/let_underscore_lock.rs:10:5
|
LL | let _ = p_m.lock();
| ^^^^^^^^^^^^^^^^^^^
@@ -8,7 +8,7 @@ LL | let _ = p_m.lock();
= note: `-D clippy::let-underscore-lock` implied by `-D warnings`
error: non-binding `let` on a synchronization lock
- --> $DIR/let_underscore_lock.rs:12:5
+ --> $DIR/let_underscore_lock.rs:13:5
|
LL | let _ = p_m1.lock();
| ^^^^^^^^^^^^^^^^^^^^
@@ -16,7 +16,7 @@ LL | let _ = p_m1.lock();
= help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
error: non-binding `let` on a synchronization lock
- --> $DIR/let_underscore_lock.rs:15:5
+ --> $DIR/let_underscore_lock.rs:16:5
|
LL | let _ = p_rw.read();
| ^^^^^^^^^^^^^^^^^^^^
@@ -24,7 +24,7 @@ LL | let _ = p_rw.read();
= help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop`
error: non-binding `let` on a synchronization lock
- --> $DIR/let_underscore_lock.rs:16:5
+ --> $DIR/let_underscore_lock.rs:17:5
|
LL | let _ = p_rw.write();
| ^^^^^^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/let_underscore_untyped.rs b/src/tools/clippy/tests/ui/let_underscore_untyped.rs
index 431d83778..18630c27f 100644
--- a/src/tools/clippy/tests/ui/let_underscore_untyped.rs
+++ b/src/tools/clippy/tests/ui/let_underscore_untyped.rs
@@ -7,8 +7,9 @@ extern crate proc_macros;
use proc_macros::with_span;
use clippy_utils::is_from_proc_macro;
+use std::boxed::Box;
+use std::fmt::Display;
use std::future::Future;
-use std::{boxed::Box, fmt::Display};
fn a() -> u32 {
1
diff --git a/src/tools/clippy/tests/ui/let_underscore_untyped.stderr b/src/tools/clippy/tests/ui/let_underscore_untyped.stderr
index bbf2508af..e0c39b6ee 100644
--- a/src/tools/clippy/tests/ui/let_underscore_untyped.stderr
+++ b/src/tools/clippy/tests/ui/let_underscore_untyped.stderr
@@ -1,60 +1,60 @@
error: non-binding `let` without a type annotation
- --> $DIR/let_underscore_untyped.rs:50:5
+ --> $DIR/let_underscore_untyped.rs:51:5
|
LL | let _ = a();
| ^^^^^^^^^^^^
|
help: consider adding a type annotation
- --> $DIR/let_underscore_untyped.rs:50:10
+ --> $DIR/let_underscore_untyped.rs:51:10
|
LL | let _ = a();
| ^
= note: `-D clippy::let-underscore-untyped` implied by `-D warnings`
error: non-binding `let` without a type annotation
- --> $DIR/let_underscore_untyped.rs:51:5
+ --> $DIR/let_underscore_untyped.rs:52:5
|
LL | let _ = b(1);
| ^^^^^^^^^^^^^
|
help: consider adding a type annotation
- --> $DIR/let_underscore_untyped.rs:51:10
+ --> $DIR/let_underscore_untyped.rs:52:10
|
LL | let _ = b(1);
| ^
error: non-binding `let` without a type annotation
- --> $DIR/let_underscore_untyped.rs:53:5
+ --> $DIR/let_underscore_untyped.rs:54:5
|
LL | let _ = d(&1);
| ^^^^^^^^^^^^^^
|
help: consider adding a type annotation
- --> $DIR/let_underscore_untyped.rs:53:10
+ --> $DIR/let_underscore_untyped.rs:54:10
|
LL | let _ = d(&1);
| ^
error: non-binding `let` without a type annotation
- --> $DIR/let_underscore_untyped.rs:54:5
+ --> $DIR/let_underscore_untyped.rs:55:5
|
LL | let _ = e();
| ^^^^^^^^^^^^
|
help: consider adding a type annotation
- --> $DIR/let_underscore_untyped.rs:54:10
+ --> $DIR/let_underscore_untyped.rs:55:10
|
LL | let _ = e();
| ^
error: non-binding `let` without a type annotation
- --> $DIR/let_underscore_untyped.rs:55:5
+ --> $DIR/let_underscore_untyped.rs:56:5
|
LL | let _ = f();
| ^^^^^^^^^^^^
|
help: consider adding a type annotation
- --> $DIR/let_underscore_untyped.rs:55:10
+ --> $DIR/let_underscore_untyped.rs:56:10
|
LL | let _ = f();
| ^
diff --git a/src/tools/clippy/tests/ui/manual_filter.stderr b/src/tools/clippy/tests/ui/manual_filter.stderr
index 53dea9229..f62d3e960 100644
--- a/src/tools/clippy/tests/ui/manual_filter.stderr
+++ b/src/tools/clippy/tests/ui/manual_filter.stderr
@@ -8,7 +8,7 @@ LL | | if x > 0 {
... |
LL | | },
LL | | };
- | |_____^ help: try this: `Some(0).filter(|&x| x <= 0)`
+ | |_____^ help: try: `Some(0).filter(|&x| x <= 0)`
|
= note: `-D clippy::manual-filter` implied by `-D warnings`
@@ -22,7 +22,7 @@ LL | | None
... |
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some(1).filter(|&x| x <= 0)`
+ | |_____^ help: try: `Some(1).filter(|&x| x <= 0)`
error: manual implementation of `Option::filter`
--> $DIR/manual_filter.rs:29:5
@@ -34,7 +34,7 @@ LL | | None
... |
LL | | _ => None,
LL | | };
- | |_____^ help: try this: `Some(2).filter(|&x| x <= 0)`
+ | |_____^ help: try: `Some(2).filter(|&x| x <= 0)`
error: manual implementation of `Option::filter`
--> $DIR/manual_filter.rs:40:5
@@ -46,7 +46,7 @@ LL | | Some(x)
... |
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some(3).filter(|&x| x > 0)`
+ | |_____^ help: try: `Some(3).filter(|&x| x > 0)`
error: manual implementation of `Option::filter`
--> $DIR/manual_filter.rs:52:5
@@ -58,7 +58,7 @@ LL | | Some(x) => {
... |
LL | | },
LL | | };
- | |_____^ help: try this: `y.filter(|&x| x <= 0)`
+ | |_____^ help: try: `y.filter(|&x| x <= 0)`
error: manual implementation of `Option::filter`
--> $DIR/manual_filter.rs:64:5
@@ -70,7 +70,7 @@ LL | | Some(x)
... |
LL | | _ => None,
LL | | };
- | |_____^ help: try this: `Some(5).filter(|&x| x > 0)`
+ | |_____^ help: try: `Some(5).filter(|&x| x > 0)`
error: manual implementation of `Option::filter`
--> $DIR/manual_filter.rs:75:5
@@ -82,7 +82,7 @@ LL | | Some(x)
... |
LL | | _ => None,
LL | | };
- | |_____^ help: try this: `Some(6).as_ref().filter(|&x| x > &0)`
+ | |_____^ help: try: `Some(6).as_ref().filter(|&x| x > &0)`
error: manual implementation of `Option::filter`
--> $DIR/manual_filter.rs:87:5
@@ -94,7 +94,7 @@ LL | | Some(x)
... |
LL | | _ => None,
LL | | };
- | |_____^ help: try this: `Some(String::new()).filter(|x| external_cond)`
+ | |_____^ help: try: `Some(String::new()).filter(|x| external_cond)`
error: manual implementation of `Option::filter`
--> $DIR/manual_filter.rs:98:5
@@ -104,7 +104,7 @@ LL | | if external_cond { Some(x) } else { None }
LL | | } else {
LL | | None
LL | | };
- | |_____^ help: try this: `Some(7).filter(|&x| external_cond)`
+ | |_____^ help: try: `Some(7).filter(|&x| external_cond)`
error: manual implementation of `Option::filter`
--> $DIR/manual_filter.rs:104:5
@@ -116,7 +116,7 @@ LL | | Some(x)
... |
LL | | _ => None,
LL | | };
- | |_____^ help: try this: `Some(8).filter(|&x| x != 0)`
+ | |_____^ help: try: `Some(8).filter(|&x| x != 0)`
error: manual implementation of `Option::filter`
--> $DIR/manual_filter.rs:115:5
@@ -128,7 +128,7 @@ LL | | Some(x)
... |
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some(9).filter(|&x| x > 10 && x < 100)`
+ | |_____^ help: try: `Some(9).filter(|&x| x > 10 && x < 100)`
error: manual implementation of `Option::filter`
--> $DIR/manual_filter.rs:141:5
@@ -142,7 +142,7 @@ LL | | None => None,
LL | | };
| |_____^
|
-help: try this
+help: try
|
LL ~ Some(11).filter(|&x| {
LL + println!("foo");
@@ -161,7 +161,7 @@ LL | | Some(x)
... |
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some(14).filter(|&x| unsafe { f(x) })`
+ | |_____^ help: try: `Some(14).filter(|&x| unsafe { f(x) })`
error: manual implementation of `Option::filter`
--> $DIR/manual_filter.rs:195:13
@@ -173,7 +173,7 @@ LL | | if f(x) { Some(x) } else { None }
LL | | },
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some(15).filter(|&x| unsafe { f(x) })`
+ | |_____^ help: try: `Some(15).filter(|&x| unsafe { f(x) })`
error: manual implementation of `Option::filter`
--> $DIR/manual_filter.rs:205:12
@@ -185,7 +185,7 @@ LL | | if x % 2 == 0 { Some(x) } else { None }
LL | | } else {
LL | | None
LL | | };
- | |_____^ help: try this: `{ Some(16).filter(|&x| x % 2 == 0) }`
+ | |_____^ help: try: `{ Some(16).filter(|&x| x % 2 == 0) }`
error: aborting due to 15 previous errors
diff --git a/src/tools/clippy/tests/ui/manual_filter_map.fixed b/src/tools/clippy/tests/ui/manual_filter_map.fixed
index 9dd376df2..35872a39a 100644
--- a/src/tools/clippy/tests/ui/manual_filter_map.fixed
+++ b/src/tools/clippy/tests/ui/manual_filter_map.fixed
@@ -120,3 +120,27 @@ fn issue_8920() {
.iter()
.filter_map(|f| f.result_field.to_owned().ok());
}
+
+fn issue8010() {
+ #[derive(Clone)]
+ enum Enum {
+ A(i32),
+ B,
+ }
+
+ let iter = [Enum::A(123), Enum::B].into_iter();
+
+ let _x = iter.clone().filter_map(|x| match x { Enum::A(s) => Some(s), _ => None });
+ let _x = iter.clone().filter(|x| matches!(x, Enum::B)).map(|x| match x {
+ Enum::A(s) => s,
+ _ => unreachable!(),
+ });
+ let _x = iter
+ .clone()
+ .filter_map(|x| match x { Enum::A(s) => Some(s), _ => None });
+ #[allow(clippy::unused_unit)]
+ let _x = iter
+ .clone()
+ .filter(|x| matches!(x, Enum::B))
+ .map(|x| if let Enum::B = x { () } else { unreachable!() });
+}
diff --git a/src/tools/clippy/tests/ui/manual_filter_map.rs b/src/tools/clippy/tests/ui/manual_filter_map.rs
index 6dd1e066a..50d8d2722 100644
--- a/src/tools/clippy/tests/ui/manual_filter_map.rs
+++ b/src/tools/clippy/tests/ui/manual_filter_map.rs
@@ -133,3 +133,31 @@ fn issue_8920() {
.filter(|f| f.result_field.is_ok())
.map(|f| f.result_field.to_owned().unwrap());
}
+
+fn issue8010() {
+ #[derive(Clone)]
+ enum Enum {
+ A(i32),
+ B,
+ }
+
+ let iter = [Enum::A(123), Enum::B].into_iter();
+
+ let _x = iter.clone().filter(|x| matches!(x, Enum::A(_))).map(|x| match x {
+ Enum::A(s) => s,
+ _ => unreachable!(),
+ });
+ let _x = iter.clone().filter(|x| matches!(x, Enum::B)).map(|x| match x {
+ Enum::A(s) => s,
+ _ => unreachable!(),
+ });
+ let _x = iter
+ .clone()
+ .filter(|x| matches!(x, Enum::A(_)))
+ .map(|x| if let Enum::A(s) = x { s } else { unreachable!() });
+ #[allow(clippy::unused_unit)]
+ let _x = iter
+ .clone()
+ .filter(|x| matches!(x, Enum::B))
+ .map(|x| if let Enum::B = x { () } else { unreachable!() });
+}
diff --git a/src/tools/clippy/tests/ui/manual_filter_map.stderr b/src/tools/clippy/tests/ui/manual_filter_map.stderr
index 882468b0f..0e8672c02 100644
--- a/src/tools/clippy/tests/ui/manual_filter_map.stderr
+++ b/src/tools/clippy/tests/ui/manual_filter_map.stderr
@@ -4,6 +4,11 @@ error: `filter(..).map(..)` can be simplified as `filter_map(..)`
LL | let _ = (0..).filter(|n| to_opt(*n).is_some()).map(|a| to_opt(a).unwrap());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `filter_map(|a| to_opt(a))`
|
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_filter_map.rs:9:30
+ |
+LL | let _ = (0..).filter(|n| to_opt(*n).is_some()).map(|a| to_opt(a).unwrap());
+ | ^^^^^^^^^^
= note: `-D clippy::manual-filter-map` implied by `-D warnings`
error: `filter(..).map(..)` can be simplified as `filter_map(..)`
@@ -11,12 +16,24 @@ error: `filter(..).map(..)` can be simplified as `filter_map(..)`
|
LL | let _ = (0..).filter(|&n| to_opt(n).is_some()).map(|a| to_opt(a).expect("hi"));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `filter_map(|a| to_opt(a))`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_filter_map.rs:12:31
+ |
+LL | let _ = (0..).filter(|&n| to_opt(n).is_some()).map(|a| to_opt(a).expect("hi"));
+ | ^^^^^^^^^
error: `filter(..).map(..)` can be simplified as `filter_map(..)`
--> $DIR/manual_filter_map.rs:15:19
|
LL | let _ = (0..).filter(|&n| to_res(n).is_ok()).map(|a| to_res(a).unwrap_or(1));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `filter_map(|a| to_res(a).ok())`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_filter_map.rs:15:31
+ |
+LL | let _ = (0..).filter(|&n| to_res(n).is_ok()).map(|a| to_res(a).unwrap_or(1));
+ | ^^^^^^^^^
error: `filter(..).map(..)` can be simplified as `filter_map(..)`
--> $DIR/manual_filter_map.rs:18:10
@@ -25,6 +42,12 @@ LL | .filter(|&x| to_ref(to_opt(x)).is_some())
| __________^
LL | | .map(|y| to_ref(to_opt(y)).unwrap());
| |____________________________________________^ help: try: `filter_map(|y| *to_ref(to_opt(y)))`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_filter_map.rs:18:22
+ |
+LL | .filter(|&x| to_ref(to_opt(x)).is_some())
+ | ^^^^^^^^^^^^^^^^^
error: `filter(..).map(..)` can be simplified as `filter_map(..)`
--> $DIR/manual_filter_map.rs:21:10
@@ -33,6 +56,12 @@ LL | .filter(|x| to_ref(to_opt(*x)).is_some())
| __________^
LL | | .map(|y| to_ref(to_opt(y)).unwrap());
| |____________________________________________^ help: try: `filter_map(|y| *to_ref(to_opt(y)))`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_filter_map.rs:21:21
+ |
+LL | .filter(|x| to_ref(to_opt(*x)).is_some())
+ | ^^^^^^^^^^^^^^^^^^
error: `filter(..).map(..)` can be simplified as `filter_map(..)`
--> $DIR/manual_filter_map.rs:25:10
@@ -41,6 +70,12 @@ LL | .filter(|&x| to_ref(to_res(x)).is_ok())
| __________^
LL | | .map(|y| to_ref(to_res(y)).unwrap());
| |____________________________________________^ help: try: `filter_map(|y| to_ref(to_res(y)).ok())`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_filter_map.rs:25:22
+ |
+LL | .filter(|&x| to_ref(to_res(x)).is_ok())
+ | ^^^^^^^^^^^^^^^^^
error: `filter(..).map(..)` can be simplified as `filter_map(..)`
--> $DIR/manual_filter_map.rs:28:10
@@ -49,6 +84,12 @@ LL | .filter(|x| to_ref(to_res(*x)).is_ok())
| __________^
LL | | .map(|y| to_ref(to_res(y)).unwrap());
| |____________________________________________^ help: try: `filter_map(|y| to_ref(to_res(y)).ok())`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_filter_map.rs:28:21
+ |
+LL | .filter(|x| to_ref(to_res(*x)).is_ok())
+ | ^^^^^^^^^^^^^^^^^^
error: `find(..).map(..)` can be simplified as `find_map(..)`
--> $DIR/manual_filter_map.rs:34:27
@@ -75,6 +116,12 @@ error: `find(..).map(..)` can be simplified as `find_map(..)`
|
LL | iter::<Option<&String>>().find(|&x| to_ref(x).is_some()).map(|y| to_ref(y).cloned().unwrap());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|y| to_ref(y).cloned())`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_filter_map.rs:37:41
+ |
+LL | iter::<Option<&String>>().find(|&x| to_ref(x).is_some()).map(|y| to_ref(y).cloned().unwrap());
+ | ^^^^^^^^^
error: `find(..).map(..)` can be simplified as `find_map(..)`
--> $DIR/manual_filter_map.rs:39:30
@@ -117,6 +164,12 @@ error: `find(..).map(..)` can be simplified as `find_map(..)`
|
LL | iter::<Result<&String, ()>>().find(|&x| to_ref(x).is_ok()).map(|y| to_ref(y).cloned().unwrap());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|y| to_ref(y).cloned().ok())`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_filter_map.rs:45:45
+ |
+LL | iter::<Result<&String, ()>>().find(|&x| to_ref(x).is_ok()).map(|y| to_ref(y).cloned().unwrap());
+ | ^^^^^^^^^
error: `filter(..).map(..)` can be simplified as `filter_map(..)`
--> $DIR/manual_filter_map.rs:93:10
@@ -190,5 +243,23 @@ LL | .filter(|f| f.result_field.is_ok())
LL | | .map(|f| f.result_field.to_owned().unwrap());
| |____________________________________________________^ help: try: `filter_map(|f| f.result_field.to_owned().ok())`
-error: aborting due to 27 previous errors
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:146:27
+ |
+LL | let _x = iter.clone().filter(|x| matches!(x, Enum::A(_))).map(|x| match x {
+ | ___________________________^
+LL | | Enum::A(s) => s,
+LL | | _ => unreachable!(),
+LL | | });
+ | |______^ help: try: `filter_map(|x| match x { Enum::A(s) => Some(s), _ => None })`
+
+error: `filter(..).map(..)` can be simplified as `filter_map(..)`
+ --> $DIR/manual_filter_map.rs:156:10
+ |
+LL | .filter(|x| matches!(x, Enum::A(_)))
+ | __________^
+LL | | .map(|x| if let Enum::A(s) = x { s } else { unreachable!() });
+ | |_____________________________________________________________________^ help: try: `filter_map(|x| match x { Enum::A(s) => Some(s), _ => None })`
+
+error: aborting due to 29 previous errors
diff --git a/src/tools/clippy/tests/ui/manual_find_map.stderr b/src/tools/clippy/tests/ui/manual_find_map.stderr
index 693a06bb5..4e52b5efa 100644
--- a/src/tools/clippy/tests/ui/manual_find_map.stderr
+++ b/src/tools/clippy/tests/ui/manual_find_map.stderr
@@ -4,6 +4,11 @@ error: `find(..).map(..)` can be simplified as `find_map(..)`
LL | let _ = (0..).find(|n| to_opt(*n).is_some()).map(|a| to_opt(a).unwrap());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|a| to_opt(a))`
|
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_find_map.rs:9:28
+ |
+LL | let _ = (0..).find(|n| to_opt(*n).is_some()).map(|a| to_opt(a).unwrap());
+ | ^^^^^^^^^^
= note: `-D clippy::manual-find-map` implied by `-D warnings`
error: `find(..).map(..)` can be simplified as `find_map(..)`
@@ -11,12 +16,24 @@ error: `find(..).map(..)` can be simplified as `find_map(..)`
|
LL | let _ = (0..).find(|&n| to_opt(n).is_some()).map(|a| to_opt(a).expect("hi"));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|a| to_opt(a))`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_find_map.rs:12:29
+ |
+LL | let _ = (0..).find(|&n| to_opt(n).is_some()).map(|a| to_opt(a).expect("hi"));
+ | ^^^^^^^^^
error: `find(..).map(..)` can be simplified as `find_map(..)`
--> $DIR/manual_find_map.rs:15:19
|
LL | let _ = (0..).find(|&n| to_res(n).is_ok()).map(|a| to_res(a).unwrap_or(1));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|a| to_res(a).ok())`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_find_map.rs:15:29
+ |
+LL | let _ = (0..).find(|&n| to_res(n).is_ok()).map(|a| to_res(a).unwrap_or(1));
+ | ^^^^^^^^^
error: `find(..).map(..)` can be simplified as `find_map(..)`
--> $DIR/manual_find_map.rs:18:10
@@ -25,6 +42,12 @@ LL | .find(|&x| to_ref(to_opt(x)).is_some())
| __________^
LL | | .map(|y| to_ref(to_opt(y)).unwrap());
| |____________________________________________^ help: try: `find_map(|y| *to_ref(to_opt(y)))`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_find_map.rs:18:20
+ |
+LL | .find(|&x| to_ref(to_opt(x)).is_some())
+ | ^^^^^^^^^^^^^^^^^
error: `find(..).map(..)` can be simplified as `find_map(..)`
--> $DIR/manual_find_map.rs:21:10
@@ -33,6 +56,12 @@ LL | .find(|x| to_ref(to_opt(*x)).is_some())
| __________^
LL | | .map(|y| to_ref(to_opt(y)).unwrap());
| |____________________________________________^ help: try: `find_map(|y| *to_ref(to_opt(y)))`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_find_map.rs:21:19
+ |
+LL | .find(|x| to_ref(to_opt(*x)).is_some())
+ | ^^^^^^^^^^^^^^^^^^
error: `find(..).map(..)` can be simplified as `find_map(..)`
--> $DIR/manual_find_map.rs:25:10
@@ -41,6 +70,12 @@ LL | .find(|&x| to_ref(to_res(x)).is_ok())
| __________^
LL | | .map(|y| to_ref(to_res(y)).unwrap());
| |____________________________________________^ help: try: `find_map(|y| to_ref(to_res(y)).ok())`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_find_map.rs:25:20
+ |
+LL | .find(|&x| to_ref(to_res(x)).is_ok())
+ | ^^^^^^^^^^^^^^^^^
error: `find(..).map(..)` can be simplified as `find_map(..)`
--> $DIR/manual_find_map.rs:28:10
@@ -49,6 +84,12 @@ LL | .find(|x| to_ref(to_res(*x)).is_ok())
| __________^
LL | | .map(|y| to_ref(to_res(y)).unwrap());
| |____________________________________________^ help: try: `find_map(|y| to_ref(to_res(y)).ok())`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_find_map.rs:28:19
+ |
+LL | .find(|x| to_ref(to_res(*x)).is_ok())
+ | ^^^^^^^^^^^^^^^^^^
error: `find(..).map(..)` can be simplified as `find_map(..)`
--> $DIR/manual_find_map.rs:34:26
@@ -91,6 +132,12 @@ error: `find(..).map(..)` can be simplified as `find_map(..)`
|
LL | iter::<Option<&String>>().find(|&x| to_ref(x).is_some()).map(|y| to_ref(y).cloned().unwrap());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|y| to_ref(y).cloned())`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_find_map.rs:40:41
+ |
+LL | iter::<Option<&String>>().find(|&x| to_ref(x).is_some()).map(|y| to_ref(y).cloned().unwrap());
+ | ^^^^^^^^^
error: `find(..).map(..)` can be simplified as `find_map(..)`
--> $DIR/manual_find_map.rs:42:30
@@ -133,6 +180,12 @@ error: `find(..).map(..)` can be simplified as `find_map(..)`
|
LL | iter::<Result<&String, ()>>().find(|&x| to_ref(x).is_ok()).map(|y| to_ref(y).cloned().unwrap());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `find_map(|y| to_ref(y).cloned().ok())`
+ |
+note: the suggestion might change the behavior of the program when merging `filter` and `map`, because this expression potentially contains side effects and will only execute once
+ --> $DIR/manual_find_map.rs:48:45
+ |
+LL | iter::<Result<&String, ()>>().find(|&x| to_ref(x).is_ok()).map(|y| to_ref(y).cloned().unwrap());
+ | ^^^^^^^^^
error: `find(..).map(..)` can be simplified as `find_map(..)`
--> $DIR/manual_find_map.rs:96:10
diff --git a/src/tools/clippy/tests/ui/manual_float_methods.rs b/src/tools/clippy/tests/ui/manual_float_methods.rs
new file mode 100644
index 000000000..af9076cfb
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_float_methods.rs
@@ -0,0 +1,55 @@
+//@aux-build:proc_macros.rs:proc-macro
+#![allow(clippy::needless_if, unused)]
+#![warn(clippy::manual_is_infinite, clippy::manual_is_finite)]
+#![feature(inline_const)]
+
+#[macro_use]
+extern crate proc_macros;
+
+const INFINITE: f32 = f32::INFINITY;
+const NEG_INFINITE: f32 = f32::NEG_INFINITY;
+
+fn fn_test() -> f64 {
+ f64::NEG_INFINITY
+}
+
+fn fn_test_not_inf() -> f64 {
+ 112.0
+}
+
+fn main() {
+ let x = 1.0f32;
+ if x == f32::INFINITY || x == f32::NEG_INFINITY {}
+ if x != f32::INFINITY && x != f32::NEG_INFINITY {}
+ if x == INFINITE || x == NEG_INFINITE {}
+ if x != INFINITE && x != NEG_INFINITE {}
+ let x = 1.0f64;
+ if x == f64::INFINITY || x == f64::NEG_INFINITY {}
+ if x != f64::INFINITY && x != f64::NEG_INFINITY {}
+ // Don't lint
+ if x.is_infinite() {}
+ if x.is_finite() {}
+ if x.abs() < f64::INFINITY {}
+ if f64::INFINITY > x.abs() {}
+ if f64::abs(x) < f64::INFINITY {}
+ if f64::INFINITY > f64::abs(x) {}
+ // Is not evaluated by `clippy_utils::constant`
+ if x != f64::INFINITY && x != fn_test() {}
+ // Not -inf
+ if x != f64::INFINITY && x != fn_test_not_inf() {}
+ const X: f64 = 1.0f64;
+ // Will be linted if `const_float_classify` is enabled
+ if const { X == f64::INFINITY || X == f64::NEG_INFINITY } {}
+ if const { X != f64::INFINITY && X != f64::NEG_INFINITY } {}
+ external! {
+ let x = 1.0;
+ if x == f32::INFINITY || x == f32::NEG_INFINITY {}
+ if x != f32::INFINITY && x != f32::NEG_INFINITY {}
+ }
+ with_span! {
+ span
+ let x = 1.0;
+ if x == f32::INFINITY || x == f32::NEG_INFINITY {}
+ if x != f32::INFINITY && x != f32::NEG_INFINITY {}
+ }
+}
diff --git a/src/tools/clippy/tests/ui/manual_float_methods.stderr b/src/tools/clippy/tests/ui/manual_float_methods.stderr
new file mode 100644
index 000000000..a56118b31
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_float_methods.stderr
@@ -0,0 +1,80 @@
+error: manually checking if a float is infinite
+ --> $DIR/manual_float_methods.rs:22:8
+ |
+LL | if x == f32::INFINITY || x == f32::NEG_INFINITY {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the dedicated method instead: `x.is_infinite()`
+ |
+ = note: `-D clippy::manual-is-infinite` implied by `-D warnings`
+
+error: manually checking if a float is finite
+ --> $DIR/manual_float_methods.rs:23:8
+ |
+LL | if x != f32::INFINITY && x != f32::NEG_INFINITY {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::manual-is-finite` implied by `-D warnings`
+help: use the dedicated method instead
+ |
+LL | if x.is_finite() {}
+ | ~~~~~~~~~~~~~
+help: this will alter how it handles NaN; if that is a problem, use instead
+ |
+LL | if x.is_finite() || x.is_nan() {}
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+help: or, for conciseness
+ |
+LL | if !x.is_infinite() {}
+ | ~~~~~~~~~~~~~~~~
+
+error: manually checking if a float is infinite
+ --> $DIR/manual_float_methods.rs:24:8
+ |
+LL | if x == INFINITE || x == NEG_INFINITE {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the dedicated method instead: `x.is_infinite()`
+
+error: manually checking if a float is finite
+ --> $DIR/manual_float_methods.rs:25:8
+ |
+LL | if x != INFINITE && x != NEG_INFINITE {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use the dedicated method instead
+ |
+LL | if x.is_finite() {}
+ | ~~~~~~~~~~~~~
+help: this will alter how it handles NaN; if that is a problem, use instead
+ |
+LL | if x.is_finite() || x.is_nan() {}
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+help: or, for conciseness
+ |
+LL | if !x.is_infinite() {}
+ | ~~~~~~~~~~~~~~~~
+
+error: manually checking if a float is infinite
+ --> $DIR/manual_float_methods.rs:27:8
+ |
+LL | if x == f64::INFINITY || x == f64::NEG_INFINITY {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the dedicated method instead: `x.is_infinite()`
+
+error: manually checking if a float is finite
+ --> $DIR/manual_float_methods.rs:28:8
+ |
+LL | if x != f64::INFINITY && x != f64::NEG_INFINITY {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use the dedicated method instead
+ |
+LL | if x.is_finite() {}
+ | ~~~~~~~~~~~~~
+help: this will alter how it handles NaN; if that is a problem, use instead
+ |
+LL | if x.is_finite() || x.is_nan() {}
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+help: or, for conciseness
+ |
+LL | if !x.is_infinite() {}
+ | ~~~~~~~~~~~~~~~~
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_let_else.rs b/src/tools/clippy/tests/ui/manual_let_else.rs
index 46241afec..381b83409 100644
--- a/src/tools/clippy/tests/ui/manual_let_else.rs
+++ b/src/tools/clippy/tests/ui/manual_let_else.rs
@@ -279,7 +279,9 @@ fn not_fire() {
create_binding_if_some_nf!(v, g());
// Already a let-else
- let Some(a) = (if let Some(b) = Some(Some(())) { b } else { return }) else { panic!() };
+ let Some(a) = (if let Some(b) = Some(Some(())) { b } else { return }) else {
+ panic!()
+ };
// If a type annotation is present, don't lint as
// expressing the type might be too hard
@@ -304,9 +306,7 @@ fn not_fire() {
let _x = if let Some(x) = Some(1) {
x
} else {
- let Some(_z) = Some(3) else {
- return
- };
+ let Some(_z) = Some(3) else { return };
1
};
diff --git a/src/tools/clippy/tests/ui/manual_let_else.stderr b/src/tools/clippy/tests/ui/manual_let_else.stderr
index 1eada4f99..912302b17 100644
--- a/src/tools/clippy/tests/ui/manual_let_else.stderr
+++ b/src/tools/clippy/tests/ui/manual_let_else.stderr
@@ -352,7 +352,7 @@ LL + };
|
error: this could be rewritten as `let...else`
- --> $DIR/manual_let_else.rs:297:5
+ --> $DIR/manual_let_else.rs:299:5
|
LL | / let _ = match ff {
LL | | Some(value) => value,
diff --git a/src/tools/clippy/tests/ui/manual_let_else_question_mark.fixed b/src/tools/clippy/tests/ui/manual_let_else_question_mark.fixed
new file mode 100644
index 000000000..02308bc7c
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_let_else_question_mark.fixed
@@ -0,0 +1,63 @@
+//@run-rustfix
+#![allow(unused_braces, unused_variables, dead_code)]
+#![allow(
+ clippy::collapsible_else_if,
+ clippy::unused_unit,
+ clippy::let_unit_value,
+ clippy::match_single_binding,
+ clippy::never_loop
+)]
+#![warn(clippy::manual_let_else, clippy::question_mark)]
+
+enum Variant {
+ A(usize, usize),
+ B(usize),
+ C,
+}
+
+fn g() -> Option<(u8, u8)> {
+ None
+}
+
+fn e() -> Variant {
+ Variant::A(0, 0)
+}
+
+fn main() {}
+
+fn foo() -> Option<()> {
+ // Fire here, normal case
+ let v = g()?;
+
+ // Don't fire here, the pattern is refutable
+ let Variant::A(v, w) = e() else { return None };
+
+ // Fire here, the pattern is irrefutable
+ let (v, w) = g()?;
+
+ // Don't fire manual_let_else in this instance: question mark can be used instead.
+ let v = g()?;
+
+ // Do fire manual_let_else in this instance: question mark cannot be used here due to the return
+ // body.
+ let Some(v) = g() else {
+ return Some(());
+ };
+
+ // Here we could also fire the question_mark lint, but we don't (as it's a match and not an if let).
+ // So we still emit manual_let_else here. For the *resulting* code, we *do* emit the question_mark
+ // lint, so for rustfix reasons, we allow the question_mark lint here.
+ #[allow(clippy::question_mark)]
+ {
+ let Some(v) = g() else { return None };
+ }
+
+ // This is a copy of the case above where we'd fire the question_mark lint, but here we have allowed
+ // it. Make sure that manual_let_else is fired as the fallback.
+ #[allow(clippy::question_mark)]
+ {
+ let Some(v) = g() else { return None };
+ }
+
+ Some(())
+}
diff --git a/src/tools/clippy/tests/ui/manual_let_else_question_mark.rs b/src/tools/clippy/tests/ui/manual_let_else_question_mark.rs
new file mode 100644
index 000000000..9c7ad386d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_let_else_question_mark.rs
@@ -0,0 +1,68 @@
+//@run-rustfix
+#![allow(unused_braces, unused_variables, dead_code)]
+#![allow(
+ clippy::collapsible_else_if,
+ clippy::unused_unit,
+ clippy::let_unit_value,
+ clippy::match_single_binding,
+ clippy::never_loop
+)]
+#![warn(clippy::manual_let_else, clippy::question_mark)]
+
+enum Variant {
+ A(usize, usize),
+ B(usize),
+ C,
+}
+
+fn g() -> Option<(u8, u8)> {
+ None
+}
+
+fn e() -> Variant {
+ Variant::A(0, 0)
+}
+
+fn main() {}
+
+fn foo() -> Option<()> {
+ // Fire here, normal case
+ let Some(v) = g() else { return None };
+
+ // Don't fire here, the pattern is refutable
+ let Variant::A(v, w) = e() else { return None };
+
+ // Fire here, the pattern is irrefutable
+ let Some((v, w)) = g() else { return None };
+
+ // Don't fire manual_let_else in this instance: question mark can be used instead.
+ let v = if let Some(v_some) = g() { v_some } else { return None };
+
+ // Do fire manual_let_else in this instance: question mark cannot be used here due to the return
+ // body.
+ let v = if let Some(v_some) = g() {
+ v_some
+ } else {
+ return Some(());
+ };
+
+ // Here we could also fire the question_mark lint, but we don't (as it's a match and not an if let).
+ // So we still emit manual_let_else here. For the *resulting* code, we *do* emit the question_mark
+ // lint, so for rustfix reasons, we allow the question_mark lint here.
+ #[allow(clippy::question_mark)]
+ {
+ let v = match g() {
+ Some(v_some) => v_some,
+ _ => return None,
+ };
+ }
+
+ // This is a copy of the case above where we'd fire the question_mark lint, but here we have allowed
+ // it. Make sure that manual_let_else is fired as the fallback.
+ #[allow(clippy::question_mark)]
+ {
+ let v = if let Some(v_some) = g() { v_some } else { return None };
+ }
+
+ Some(())
+}
diff --git a/src/tools/clippy/tests/ui/manual_let_else_question_mark.stderr b/src/tools/clippy/tests/ui/manual_let_else_question_mark.stderr
new file mode 100644
index 000000000..d7d2e127e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/manual_let_else_question_mark.stderr
@@ -0,0 +1,55 @@
+error: this `let...else` may be rewritten with the `?` operator
+ --> $DIR/manual_let_else_question_mark.rs:30:5
+ |
+LL | let Some(v) = g() else { return None };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `let v = g()?;`
+ |
+ = note: `-D clippy::question-mark` implied by `-D warnings`
+
+error: this `let...else` may be rewritten with the `?` operator
+ --> $DIR/manual_let_else_question_mark.rs:36:5
+ |
+LL | let Some((v, w)) = g() else { return None };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `let (v, w) = g()?;`
+
+error: this block may be rewritten with the `?` operator
+ --> $DIR/manual_let_else_question_mark.rs:39:13
+ |
+LL | let v = if let Some(v_some) = g() { v_some } else { return None };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `g()?`
+
+error: this could be rewritten as `let...else`
+ --> $DIR/manual_let_else_question_mark.rs:43:5
+ |
+LL | / let v = if let Some(v_some) = g() {
+LL | | v_some
+LL | | } else {
+LL | | return Some(());
+LL | | };
+ | |______^
+ |
+ = note: `-D clippy::manual-let-else` implied by `-D warnings`
+help: consider writing
+ |
+LL ~ let Some(v) = g() else {
+LL + return Some(());
+LL + };
+ |
+
+error: this could be rewritten as `let...else`
+ --> $DIR/manual_let_else_question_mark.rs:54:9
+ |
+LL | / let v = match g() {
+LL | | Some(v_some) => v_some,
+LL | | _ => return None,
+LL | | };
+ | |__________^ help: consider writing: `let Some(v) = g() else { return None };`
+
+error: this could be rewritten as `let...else`
+ --> $DIR/manual_let_else_question_mark.rs:64:9
+ |
+LL | let v = if let Some(v_some) = g() { v_some } else { return None };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider writing: `let Some(v) = g() else { return None };`
+
+error: aborting due to 6 previous errors
+
diff --git a/src/tools/clippy/tests/ui/manual_map_option.stderr b/src/tools/clippy/tests/ui/manual_map_option.stderr
index cdc2c0e62..3f9caad4e 100644
--- a/src/tools/clippy/tests/ui/manual_map_option.stderr
+++ b/src/tools/clippy/tests/ui/manual_map_option.stderr
@@ -5,7 +5,7 @@ LL | / match Some(0) {
LL | | Some(_) => Some(2),
LL | | None::<u32> => None,
LL | | };
- | |_____^ help: try this: `Some(0).map(|_| 2)`
+ | |_____^ help: try: `Some(0).map(|_| 2)`
|
= note: `-D clippy::manual-map` implied by `-D warnings`
@@ -16,7 +16,7 @@ LL | / match Some(0) {
LL | | Some(x) => Some(x + 1),
LL | | _ => None,
LL | | };
- | |_____^ help: try this: `Some(0).map(|x| x + 1)`
+ | |_____^ help: try: `Some(0).map(|x| x + 1)`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:25:5
@@ -25,7 +25,7 @@ LL | / match Some("") {
LL | | Some(x) => Some(x.is_empty()),
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some("").map(|x| x.is_empty())`
+ | |_____^ help: try: `Some("").map(|x| x.is_empty())`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:30:5
@@ -35,7 +35,7 @@ LL | | Some(!x)
LL | | } else {
LL | | None
LL | | };
- | |_____^ help: try this: `Some(0).map(|x| !x)`
+ | |_____^ help: try: `Some(0).map(|x| !x)`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:37:5
@@ -44,7 +44,7 @@ LL | / match Some(0) {
LL | | Some(x) => { Some(std::convert::identity(x)) }
LL | | None => { None }
LL | | };
- | |_____^ help: try this: `Some(0).map(std::convert::identity)`
+ | |_____^ help: try: `Some(0).map(std::convert::identity)`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:42:5
@@ -53,7 +53,7 @@ LL | / match Some(&String::new()) {
LL | | Some(x) => Some(str::len(x)),
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some(&String::new()).map(|x| str::len(x))`
+ | |_____^ help: try: `Some(&String::new()).map(|x| str::len(x))`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:52:5
@@ -62,7 +62,7 @@ LL | / match &Some([0, 1]) {
LL | | Some(x) => Some(x[0]),
LL | | &None => None,
LL | | };
- | |_____^ help: try this: `Some([0, 1]).as_ref().map(|x| x[0])`
+ | |_____^ help: try: `Some([0, 1]).as_ref().map(|x| x[0])`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:57:5
@@ -71,7 +71,7 @@ LL | / match &Some(0) {
LL | | &Some(x) => Some(x * 2),
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some(0).map(|x| x * 2)`
+ | |_____^ help: try: `Some(0).map(|x| x * 2)`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:62:5
@@ -80,7 +80,7 @@ LL | / match Some(String::new()) {
LL | | Some(ref x) => Some(x.is_empty()),
LL | | _ => None,
LL | | };
- | |_____^ help: try this: `Some(String::new()).as_ref().map(|x| x.is_empty())`
+ | |_____^ help: try: `Some(String::new()).as_ref().map(|x| x.is_empty())`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:67:5
@@ -89,7 +89,7 @@ LL | / match &&Some(String::new()) {
LL | | Some(x) => Some(x.len()),
LL | | _ => None,
LL | | };
- | |_____^ help: try this: `Some(String::new()).as_ref().map(|x| x.len())`
+ | |_____^ help: try: `Some(String::new()).as_ref().map(|x| x.len())`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:72:5
@@ -98,7 +98,7 @@ LL | / match &&Some(0) {
LL | | &&Some(x) => Some(x + x),
LL | | &&_ => None,
LL | | };
- | |_____^ help: try this: `Some(0).map(|x| x + x)`
+ | |_____^ help: try: `Some(0).map(|x| x + x)`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:85:9
@@ -107,7 +107,7 @@ LL | / match &mut Some(String::new()) {
LL | | Some(x) => Some(x.push_str("")),
LL | | None => None,
LL | | };
- | |_________^ help: try this: `Some(String::new()).as_mut().map(|x| x.push_str(""))`
+ | |_________^ help: try: `Some(String::new()).as_mut().map(|x| x.push_str(""))`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:91:5
@@ -116,7 +116,7 @@ LL | / match &mut Some(String::new()) {
LL | | Some(ref x) => Some(x.len()),
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some(String::new()).as_ref().map(|x| x.len())`
+ | |_____^ help: try: `Some(String::new()).as_ref().map(|x| x.len())`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:96:5
@@ -125,7 +125,7 @@ LL | / match &mut &Some(String::new()) {
LL | | Some(x) => Some(x.is_empty()),
LL | | &mut _ => None,
LL | | };
- | |_____^ help: try this: `Some(String::new()).as_ref().map(|x| x.is_empty())`
+ | |_____^ help: try: `Some(String::new()).as_ref().map(|x| x.is_empty())`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:101:5
@@ -134,7 +134,7 @@ LL | / match Some((0, 1, 2)) {
LL | | Some((x, y, z)) => Some(x + y + z),
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some((0, 1, 2)).map(|(x, y, z)| x + y + z)`
+ | |_____^ help: try: `Some((0, 1, 2)).map(|(x, y, z)| x + y + z)`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:106:5
@@ -143,7 +143,7 @@ LL | / match Some([1, 2, 3]) {
LL | | Some([first, ..]) => Some(first),
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some([1, 2, 3]).map(|[first, ..]| first)`
+ | |_____^ help: try: `Some([1, 2, 3]).map(|[first, ..]| first)`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:111:5
@@ -152,7 +152,7 @@ LL | / match &Some((String::new(), "test")) {
LL | | Some((x, y)) => Some((y, x)),
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some((String::new(), "test")).as_ref().map(|(x, y)| (y, x))`
+ | |_____^ help: try: `Some((String::new(), "test")).as_ref().map(|(x, y)| (y, x))`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:169:5
@@ -161,7 +161,7 @@ LL | / match Some(0) {
LL | | Some(x) => Some(vec![x]),
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some(0).map(|x| vec![x])`
+ | |_____^ help: try: `Some(0).map(|x| vec![x])`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:174:5
@@ -170,7 +170,7 @@ LL | / match option_env!("") {
LL | | Some(x) => Some(String::from(x)),
LL | | None => None,
LL | | };
- | |_____^ help: try this: `option_env!("").map(String::from)`
+ | |_____^ help: try: `option_env!("").map(String::from)`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:194:12
@@ -181,7 +181,7 @@ LL | | Some(x + 1)
LL | | } else {
LL | | None
LL | | };
- | |_____^ help: try this: `{ Some(0).map(|x| x + 1) }`
+ | |_____^ help: try: `{ Some(0).map(|x| x + 1) }`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option.rs:202:12
@@ -192,7 +192,7 @@ LL | | Some(x + 1)
LL | | } else {
LL | | None
LL | | };
- | |_____^ help: try this: `{ Some(0).map(|x| x + 1) }`
+ | |_____^ help: try: `{ Some(0).map(|x| x + 1) }`
error: aborting due to 21 previous errors
diff --git a/src/tools/clippy/tests/ui/manual_map_option_2.stderr b/src/tools/clippy/tests/ui/manual_map_option_2.stderr
index d35b6252f..8c78fcffc 100644
--- a/src/tools/clippy/tests/ui/manual_map_option_2.stderr
+++ b/src/tools/clippy/tests/ui/manual_map_option_2.stderr
@@ -12,7 +12,7 @@ LL | | };
| |_____^
|
= note: `-D clippy::manual-map` implied by `-D warnings`
-help: try this
+help: try
|
LL ~ let _ = Some(0).map(|x| {
LL + let y = (String::new(), String::new());
@@ -32,7 +32,7 @@ LL | | None => None,
LL | | };
| |_____^
|
-help: try this
+help: try
|
LL ~ let _ = s.as_ref().map(|x| {
LL + if let Some(ref s) = s { (x.clone(), s) } else { panic!() }
@@ -47,7 +47,7 @@ LL | let _ = match Some(0) {
LL | | Some(x) => Some(f(x)),
LL | | None => None,
LL | | };
- | |_________^ help: try this: `Some(0).map(|x| f(x))`
+ | |_________^ help: try: `Some(0).map(|x| f(x))`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option_2.rs:67:13
@@ -57,7 +57,7 @@ LL | let _ = match Some(0) {
LL | | Some(x) => unsafe { Some(f(x)) },
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some(0).map(|x| unsafe { f(x) })`
+ | |_____^ help: try: `Some(0).map(|x| unsafe { f(x) })`
error: manual implementation of `Option::map`
--> $DIR/manual_map_option_2.rs:71:13
@@ -67,7 +67,7 @@ LL | let _ = match Some(0) {
LL | | Some(x) => Some(unsafe { f(x) }),
LL | | None => None,
LL | | };
- | |_____^ help: try this: `Some(0).map(|x| unsafe { f(x) })`
+ | |_____^ help: try: `Some(0).map(|x| unsafe { f(x) })`
error: aborting due to 5 previous errors
diff --git a/src/tools/clippy/tests/ui/manual_range_patterns.fixed b/src/tools/clippy/tests/ui/manual_range_patterns.fixed
index 9eee8f371..6bfcf263a 100644
--- a/src/tools/clippy/tests/ui/manual_range_patterns.fixed
+++ b/src/tools/clippy/tests/ui/manual_range_patterns.fixed
@@ -25,6 +25,10 @@ fn main() {
1..=10 => true,
_ => false,
};
+ let _ = matches!(f, -5..=3);
+ let _ = matches!(f, -1 | -5 | 3 | -2 | -4 | -3 | 0 | 1); // 2 is missing
+ let _ = matches!(f, -1000001..=1000001);
+ let _ = matches!(f, -1_000_000..=1_000_000 | -1_000_001 | 1_000_002);
macro_rules! mac {
($e:expr) => {
diff --git a/src/tools/clippy/tests/ui/manual_range_patterns.rs b/src/tools/clippy/tests/ui/manual_range_patterns.rs
index 10743a7d0..4a429bb2a 100644
--- a/src/tools/clippy/tests/ui/manual_range_patterns.rs
+++ b/src/tools/clippy/tests/ui/manual_range_patterns.rs
@@ -25,6 +25,10 @@ fn main() {
1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 => true,
_ => false,
};
+ let _ = matches!(f, -1 | -5 | 3 | -2 | -4 | -3 | 0 | 1 | 2);
+ let _ = matches!(f, -1 | -5 | 3 | -2 | -4 | -3 | 0 | 1); // 2 is missing
+ let _ = matches!(f, -1_000_000..=1_000_000 | -1_000_001 | 1_000_001);
+ let _ = matches!(f, -1_000_000..=1_000_000 | -1_000_001 | 1_000_002);
macro_rules! mac {
($e:expr) => {
diff --git a/src/tools/clippy/tests/ui/manual_range_patterns.stderr b/src/tools/clippy/tests/ui/manual_range_patterns.stderr
index bc9e33501..b1b55d483 100644
--- a/src/tools/clippy/tests/ui/manual_range_patterns.stderr
+++ b/src/tools/clippy/tests/ui/manual_range_patterns.stderr
@@ -37,7 +37,19 @@ LL | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 => true,
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `1..=10`
error: this OR pattern can be rewritten using a range
- --> $DIR/manual_range_patterns.rs:31:26
+ --> $DIR/manual_range_patterns.rs:28:25
+ |
+LL | let _ = matches!(f, -1 | -5 | 3 | -2 | -4 | -3 | 0 | 1 | 2);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `-5..=3`
+
+error: this OR pattern can be rewritten using a range
+ --> $DIR/manual_range_patterns.rs:30:25
+ |
+LL | let _ = matches!(f, -1_000_000..=1_000_000 | -1_000_001 | 1_000_001);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `-1000001..=1000001`
+
+error: this OR pattern can be rewritten using a range
+ --> $DIR/manual_range_patterns.rs:35:26
|
LL | matches!($e, 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `1..=10`
@@ -47,5 +59,5 @@ LL | mac!(f);
|
= note: this error originates in the macro `mac` (in Nightly builds, run with -Z macro-backtrace for more info)
-error: aborting due to 7 previous errors
+error: aborting due to 9 previous errors
diff --git a/src/tools/clippy/tests/ui/manual_retain.fixed b/src/tools/clippy/tests/ui/manual_retain.fixed
index 09fb0d758..c95d40fec 100644
--- a/src/tools/clippy/tests/ui/manual_retain.fixed
+++ b/src/tools/clippy/tests/ui/manual_retain.fixed
@@ -1,12 +1,7 @@
//@run-rustfix
#![warn(clippy::manual_retain)]
#![allow(unused, clippy::redundant_clone)]
-use std::collections::BTreeMap;
-use std::collections::BTreeSet;
-use std::collections::BinaryHeap;
-use std::collections::HashMap;
-use std::collections::HashSet;
-use std::collections::VecDeque;
+use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque};
fn main() {
binary_heap_retain();
diff --git a/src/tools/clippy/tests/ui/manual_retain.rs b/src/tools/clippy/tests/ui/manual_retain.rs
index 7fee4c95c..9a3434f48 100644
--- a/src/tools/clippy/tests/ui/manual_retain.rs
+++ b/src/tools/clippy/tests/ui/manual_retain.rs
@@ -1,12 +1,7 @@
//@run-rustfix
#![warn(clippy::manual_retain)]
#![allow(unused, clippy::redundant_clone)]
-use std::collections::BTreeMap;
-use std::collections::BTreeSet;
-use std::collections::BinaryHeap;
-use std::collections::HashMap;
-use std::collections::HashSet;
-use std::collections::VecDeque;
+use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque};
fn main() {
binary_heap_retain();
diff --git a/src/tools/clippy/tests/ui/manual_retain.stderr b/src/tools/clippy/tests/ui/manual_retain.stderr
index 89316ce1d..0936a2384 100644
--- a/src/tools/clippy/tests/ui/manual_retain.stderr
+++ b/src/tools/clippy/tests/ui/manual_retain.stderr
@@ -1,5 +1,5 @@
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:51:5
+ --> $DIR/manual_retain.rs:46:5
|
LL | btree_map = btree_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_map.retain(|k, _| k % 2 == 0)`
@@ -7,13 +7,13 @@ LL | btree_map = btree_map.into_iter().filter(|(k, _)| k % 2 == 0).collect()
= note: `-D clippy::manual-retain` implied by `-D warnings`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:52:5
+ --> $DIR/manual_retain.rs:47:5
|
LL | btree_map = btree_map.into_iter().filter(|(_, v)| v % 2 == 0).collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_map.retain(|_, &mut v| v % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:53:5
+ --> $DIR/manual_retain.rs:48:5
|
LL | / btree_map = btree_map
LL | | .into_iter()
@@ -22,37 +22,37 @@ LL | | .collect();
| |__________________^ help: consider calling `.retain()` instead: `btree_map.retain(|k, &mut v| (k % 2 == 0) && (v % 2 == 0))`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:75:5
+ --> $DIR/manual_retain.rs:70:5
|
LL | btree_set = btree_set.iter().filter(|&x| x % 2 == 0).copied().collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_set.retain(|x| x % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:76:5
+ --> $DIR/manual_retain.rs:71:5
|
LL | btree_set = btree_set.iter().filter(|&x| x % 2 == 0).cloned().collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_set.retain(|x| x % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:77:5
+ --> $DIR/manual_retain.rs:72:5
|
LL | btree_set = btree_set.into_iter().filter(|x| x % 2 == 0).collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_set.retain(|x| x % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:107:5
+ --> $DIR/manual_retain.rs:102:5
|
LL | hash_map = hash_map.into_iter().filter(|(k, _)| k % 2 == 0).collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_map.retain(|k, _| k % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:108:5
+ --> $DIR/manual_retain.rs:103:5
|
LL | hash_map = hash_map.into_iter().filter(|(_, v)| v % 2 == 0).collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_map.retain(|_, &mut v| v % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:109:5
+ --> $DIR/manual_retain.rs:104:5
|
LL | / hash_map = hash_map
LL | | .into_iter()
@@ -61,61 +61,61 @@ LL | | .collect();
| |__________________^ help: consider calling `.retain()` instead: `hash_map.retain(|k, &mut v| (k % 2 == 0) && (v % 2 == 0))`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:130:5
+ --> $DIR/manual_retain.rs:125:5
|
LL | hash_set = hash_set.into_iter().filter(|x| x % 2 == 0).collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_set.retain(|x| x % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:131:5
+ --> $DIR/manual_retain.rs:126:5
|
LL | hash_set = hash_set.iter().filter(|&x| x % 2 == 0).copied().collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_set.retain(|x| x % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:132:5
+ --> $DIR/manual_retain.rs:127:5
|
LL | hash_set = hash_set.iter().filter(|&x| x % 2 == 0).cloned().collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_set.retain(|x| x % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:161:5
+ --> $DIR/manual_retain.rs:156:5
|
LL | s = s.chars().filter(|&c| c != 'o').to_owned().collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `s.retain(|c| c != 'o')`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:173:5
+ --> $DIR/manual_retain.rs:168:5
|
LL | vec = vec.iter().filter(|&x| x % 2 == 0).copied().collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec.retain(|x| x % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:174:5
+ --> $DIR/manual_retain.rs:169:5
|
LL | vec = vec.iter().filter(|&x| x % 2 == 0).cloned().collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec.retain(|x| x % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:175:5
+ --> $DIR/manual_retain.rs:170:5
|
LL | vec = vec.into_iter().filter(|x| x % 2 == 0).collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec.retain(|x| x % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:197:5
+ --> $DIR/manual_retain.rs:192:5
|
LL | vec_deque = vec_deque.iter().filter(|&x| x % 2 == 0).copied().collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec_deque.retain(|x| x % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:198:5
+ --> $DIR/manual_retain.rs:193:5
|
LL | vec_deque = vec_deque.iter().filter(|&x| x % 2 == 0).cloned().collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec_deque.retain(|x| x % 2 == 0)`
error: this expression can be written more simply using `.retain()`
- --> $DIR/manual_retain.rs:199:5
+ --> $DIR/manual_retain.rs:194:5
|
LL | vec_deque = vec_deque.into_iter().filter(|x| x % 2 == 0).collect();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec_deque.retain(|x| x % 2 == 0)`
diff --git a/src/tools/clippy/tests/ui/manual_split_once.stderr b/src/tools/clippy/tests/ui/manual_split_once.stderr
index 78da5a16c..f454f95b4 100644
--- a/src/tools/clippy/tests/ui/manual_split_once.stderr
+++ b/src/tools/clippy/tests/ui/manual_split_once.stderr
@@ -2,7 +2,7 @@ error: manual implementation of `split_once`
--> $DIR/manual_split_once.rs:13:13
|
LL | let _ = "key=value".splitn(2, '=').nth(1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split_once('=').unwrap().1`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split_once('=').unwrap().1`
|
= note: `-D clippy::manual-split-once` implied by `-D warnings`
@@ -10,73 +10,73 @@ error: manual implementation of `split_once`
--> $DIR/manual_split_once.rs:14:13
|
LL | let _ = "key=value".splitn(2, '=').skip(1).next().unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split_once('=').unwrap().1`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split_once('=').unwrap().1`
error: manual implementation of `split_once`
--> $DIR/manual_split_once.rs:15:18
|
LL | let (_, _) = "key=value".splitn(2, '=').next_tuple().unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split_once('=')`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split_once('=')`
error: manual implementation of `split_once`
--> $DIR/manual_split_once.rs:18:13
|
LL | let _ = s.splitn(2, '=').nth(1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=').unwrap().1`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=').unwrap().1`
error: manual implementation of `split_once`
--> $DIR/manual_split_once.rs:21:13
|
LL | let _ = s.splitn(2, '=').nth(1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=').unwrap().1`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=').unwrap().1`
error: manual implementation of `split_once`
--> $DIR/manual_split_once.rs:24:13
|
LL | let _ = s.splitn(2, '=').skip(1).next().unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=').unwrap().1`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=').unwrap().1`
error: manual implementation of `split_once`
--> $DIR/manual_split_once.rs:27:17
|
LL | let _ = s.splitn(2, '=').nth(1)?;
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=')?.1`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=')?.1`
error: manual implementation of `split_once`
--> $DIR/manual_split_once.rs:28:17
|
LL | let _ = s.splitn(2, '=').skip(1).next()?;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=')?.1`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=')?.1`
error: manual implementation of `rsplit_once`
--> $DIR/manual_split_once.rs:29:17
|
LL | let _ = s.rsplitn(2, '=').nth(1)?;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit_once('=')?.0`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.rsplit_once('=')?.0`
error: manual implementation of `rsplit_once`
--> $DIR/manual_split_once.rs:30:17
|
LL | let _ = s.rsplitn(2, '=').skip(1).next()?;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit_once('=')?.0`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.rsplit_once('=')?.0`
error: manual implementation of `rsplit_once`
--> $DIR/manual_split_once.rs:38:13
|
LL | let _ = "key=value".rsplitn(2, '=').nth(1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".rsplit_once('=').unwrap().0`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".rsplit_once('=').unwrap().0`
error: manual implementation of `rsplit_once`
--> $DIR/manual_split_once.rs:39:18
|
LL | let (_, _) = "key=value".rsplitn(2, '=').next_tuple().unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".rsplit_once('=').map(|(x, y)| (y, x))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".rsplit_once('=').map(|(x, y)| (y, x))`
error: manual implementation of `rsplit_once`
--> $DIR/manual_split_once.rs:40:13
|
LL | let _ = s.rsplitn(2, '=').nth(1);
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit_once('=').map(|x| x.0)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.rsplit_once('=').map(|x| x.0)`
error: manual implementation of `split_once`
--> $DIR/manual_split_once.rs:44:5
@@ -182,7 +182,7 @@ error: manual implementation of `split_once`
--> $DIR/manual_split_once.rs:141:13
|
LL | let _ = "key=value".splitn(2, '=').nth(1).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split_once('=').unwrap().1`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split_once('=').unwrap().1`
error: manual implementation of `split_once`
--> $DIR/manual_split_once.rs:143:5
diff --git a/src/tools/clippy/tests/ui/manual_str_repeat.stderr b/src/tools/clippy/tests/ui/manual_str_repeat.stderr
index bdfee7cab..331bb6ea5 100644
--- a/src/tools/clippy/tests/ui/manual_str_repeat.stderr
+++ b/src/tools/clippy/tests/ui/manual_str_repeat.stderr
@@ -2,7 +2,7 @@ error: manual implementation of `str::repeat` using iterators
--> $DIR/manual_str_repeat.rs:9:21
|
LL | let _: String = std::iter::repeat("test").take(10).collect();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"test".repeat(10)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"test".repeat(10)`
|
= note: `-D clippy::manual-str-repeat` implied by `-D warnings`
@@ -10,55 +10,55 @@ error: manual implementation of `str::repeat` using iterators
--> $DIR/manual_str_repeat.rs:10:21
|
LL | let _: String = std::iter::repeat('x').take(10).collect();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"x".repeat(10)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"x".repeat(10)`
error: manual implementation of `str::repeat` using iterators
--> $DIR/manual_str_repeat.rs:11:21
|
LL | let _: String = std::iter::repeat('\'').take(10).collect();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"'".repeat(10)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"'".repeat(10)`
error: manual implementation of `str::repeat` using iterators
--> $DIR/manual_str_repeat.rs:12:21
|
LL | let _: String = std::iter::repeat('"').take(10).collect();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"\"".repeat(10)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"\"".repeat(10)`
error: manual implementation of `str::repeat` using iterators
--> $DIR/manual_str_repeat.rs:16:13
|
LL | let _ = repeat(x).take(count + 2).collect::<String>();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `x.repeat(count + 2)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.repeat(count + 2)`
error: manual implementation of `str::repeat` using iterators
--> $DIR/manual_str_repeat.rs:25:21
|
LL | let _: String = repeat(*x).take(count).collect();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(*x).repeat(count)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(*x).repeat(count)`
error: manual implementation of `str::repeat` using iterators
--> $DIR/manual_str_repeat.rs:34:21
|
LL | let _: String = repeat(x).take(count).collect();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `x.repeat(count)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.repeat(count)`
error: manual implementation of `str::repeat` using iterators
--> $DIR/manual_str_repeat.rs:46:21
|
LL | let _: String = repeat(Cow::Borrowed("test")).take(count).collect();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `Cow::Borrowed("test").repeat(count)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Cow::Borrowed("test").repeat(count)`
error: manual implementation of `str::repeat` using iterators
--> $DIR/manual_str_repeat.rs:49:21
|
LL | let _: String = repeat(x).take(count).collect();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `x.repeat(count)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.repeat(count)`
error: manual implementation of `str::repeat` using iterators
--> $DIR/manual_str_repeat.rs:64:21
|
LL | let _: String = std::iter::repeat("test").take(10).collect();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"test".repeat(10)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"test".repeat(10)`
error: aborting due to 10 previous errors
diff --git a/src/tools/clippy/tests/ui/manual_try_fold.rs b/src/tools/clippy/tests/ui/manual_try_fold.rs
index 4521e9fa1..05c658579 100644
--- a/src/tools/clippy/tests/ui/manual_try_fold.rs
+++ b/src/tools/clippy/tests/ui/manual_try_fold.rs
@@ -3,9 +3,7 @@
#![warn(clippy::manual_try_fold)]
#![feature(try_trait_v2)]
-use std::ops::ControlFlow;
-use std::ops::FromResidual;
-use std::ops::Try;
+use std::ops::{ControlFlow, FromResidual, Try};
#[macro_use]
extern crate proc_macros;
diff --git a/src/tools/clippy/tests/ui/manual_try_fold.stderr b/src/tools/clippy/tests/ui/manual_try_fold.stderr
index a0cf5b3b5..f1bb97c6d 100644
--- a/src/tools/clippy/tests/ui/manual_try_fold.stderr
+++ b/src/tools/clippy/tests/ui/manual_try_fold.stderr
@@ -1,5 +1,5 @@
error: usage of `Iterator::fold` on a type that implements `Try`
- --> $DIR/manual_try_fold.rs:61:10
+ --> $DIR/manual_try_fold.rs:59:10
|
LL | .fold(Some(0i32), |sum, i| sum?.checked_add(*i))
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `try_fold` instead: `try_fold(0i32, |sum, i| ...)`
@@ -7,19 +7,19 @@ LL | .fold(Some(0i32), |sum, i| sum?.checked_add(*i))
= note: `-D clippy::manual-try-fold` implied by `-D warnings`
error: usage of `Iterator::fold` on a type that implements `Try`
- --> $DIR/manual_try_fold.rs:65:10
+ --> $DIR/manual_try_fold.rs:63:10
|
LL | .fold(NotOption(0i32, 0i32), |sum, i| NotOption(0i32, 0i32));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `try_fold` instead: `try_fold(..., |sum, i| ...)`
error: usage of `Iterator::fold` on a type that implements `Try`
- --> $DIR/manual_try_fold.rs:68:10
+ --> $DIR/manual_try_fold.rs:66:10
|
LL | .fold(NotOptionButWorse(0i32), |sum, i| NotOptionButWorse(0i32));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `try_fold` instead: `try_fold(0i32, |sum, i| ...)`
error: usage of `Iterator::fold` on a type that implements `Try`
- --> $DIR/manual_try_fold.rs:98:10
+ --> $DIR/manual_try_fold.rs:96:10
|
LL | .fold(Some(0i32), |sum, i| sum?.checked_add(*i))
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `try_fold` instead: `try_fold(0i32, |sum, i| ...)`
diff --git a/src/tools/clippy/tests/ui/map_collect_result_unit.stderr b/src/tools/clippy/tests/ui/map_collect_result_unit.stderr
index 8b06e13ba..596e51e57 100644
--- a/src/tools/clippy/tests/ui/map_collect_result_unit.stderr
+++ b/src/tools/clippy/tests/ui/map_collect_result_unit.stderr
@@ -2,7 +2,7 @@ error: `.map().collect()` can be replaced with `.try_for_each()`
--> $DIR/map_collect_result_unit.rs:6:17
|
LL | let _ = (0..3).map(|t| Err(t + 1)).collect::<Result<(), _>>();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(0..3).try_for_each(|t| Err(t + 1))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(0..3).try_for_each(|t| Err(t + 1))`
|
= note: `-D clippy::map-collect-result-unit` implied by `-D warnings`
@@ -10,7 +10,7 @@ error: `.map().collect()` can be replaced with `.try_for_each()`
--> $DIR/map_collect_result_unit.rs:7:32
|
LL | let _: Result<(), _> = (0..3).map(|t| Err(t + 1)).collect();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(0..3).try_for_each(|t| Err(t + 1))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(0..3).try_for_each(|t| Err(t + 1))`
error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui/map_unwrap_or.stderr b/src/tools/clippy/tests/ui/map_unwrap_or.stderr
index 9f4a4a9ae..5b3c61acf 100644
--- a/src/tools/clippy/tests/ui/map_unwrap_or.stderr
+++ b/src/tools/clippy/tests/ui/map_unwrap_or.stderr
@@ -162,7 +162,7 @@ error: called `map(<f>).unwrap_or_else(<g>)` on a `Result` value. This can be do
--> $DIR/map_unwrap_or.rs:99:13
|
LL | let _ = res.map(|x| x + 1).unwrap_or_else(|_e| 0);
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `res.map_or_else(|_e| 0, |x| x + 1)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `res.map_or_else(|_e| 0, |x| x + 1)`
error: called `map(<f>).unwrap_or(<a>)` on an `Option` value. This can be done more directly by calling `map_or(<a>, <f>)` instead
--> $DIR/map_unwrap_or.rs:106:13
diff --git a/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr b/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr
index 1837bc2ca..71dc009f2 100644
--- a/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr
+++ b/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr
@@ -5,7 +5,7 @@ LL | let _ = opt.map(|x| x + 1)
| _____________^
LL | | // Should lint even though this call is on a separate line.
LL | | .unwrap_or_else(|| 0);
- | |_____________________________^ help: try this: `opt.map_or_else(|| 0, |x| x + 1)`
+ | |_____________________________^ help: try: `opt.map_or_else(|| 0, |x| x + 1)`
|
= note: `-D clippy::map-unwrap-or` implied by `-D warnings`
@@ -16,7 +16,7 @@ LL | let _ = res.map(|x| x + 1)
| _____________^
LL | | // should lint even though this call is on a separate line
LL | | .unwrap_or_else(|_e| 0);
- | |_______________________________^ help: try this: `res.map_or_else(|_e| 0, |x| x + 1)`
+ | |_______________________________^ help: try: `res.map_or_else(|_e| 0, |x| x + 1)`
error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui/match_as_ref.fixed b/src/tools/clippy/tests/ui/match_as_ref.fixed
index 8fa3f5325..61d414bdf 100644
--- a/src/tools/clippy/tests/ui/match_as_ref.fixed
+++ b/src/tools/clippy/tests/ui/match_as_ref.fixed
@@ -12,7 +12,9 @@ fn match_as_ref() {
}
mod issue4437 {
- use std::{error::Error, fmt, num::ParseIntError};
+ use std::error::Error;
+ use std::fmt;
+ use std::num::ParseIntError;
#[derive(Debug)]
struct E {
diff --git a/src/tools/clippy/tests/ui/match_as_ref.rs b/src/tools/clippy/tests/ui/match_as_ref.rs
index 02a177914..cd39514c5 100644
--- a/src/tools/clippy/tests/ui/match_as_ref.rs
+++ b/src/tools/clippy/tests/ui/match_as_ref.rs
@@ -18,7 +18,9 @@ fn match_as_ref() {
}
mod issue4437 {
- use std::{error::Error, fmt, num::ParseIntError};
+ use std::error::Error;
+ use std::fmt;
+ use std::num::ParseIntError;
#[derive(Debug)]
struct E {
diff --git a/src/tools/clippy/tests/ui/match_as_ref.stderr b/src/tools/clippy/tests/ui/match_as_ref.stderr
index c3b62849c..2e6955eb8 100644
--- a/src/tools/clippy/tests/ui/match_as_ref.stderr
+++ b/src/tools/clippy/tests/ui/match_as_ref.stderr
@@ -6,7 +6,7 @@ LL | let borrowed: Option<&()> = match owned {
LL | | None => None,
LL | | Some(ref v) => Some(v),
LL | | };
- | |_____^ help: try this: `owned.as_ref()`
+ | |_____^ help: try: `owned.as_ref()`
|
= note: `-D clippy::match-as-ref` implied by `-D warnings`
@@ -18,16 +18,16 @@ LL | let borrow_mut: Option<&mut ()> = match mut_owned {
LL | | None => None,
LL | | Some(ref mut v) => Some(v),
LL | | };
- | |_____^ help: try this: `mut_owned.as_mut()`
+ | |_____^ help: try: `mut_owned.as_mut()`
error: use `as_ref()` instead
- --> $DIR/match_as_ref.rs:30:13
+ --> $DIR/match_as_ref.rs:32:13
|
LL | / match self.source {
LL | | Some(ref s) => Some(s),
LL | | None => None,
LL | | }
- | |_____________^ help: try this: `self.source.as_ref().map(|x| x as _)`
+ | |_____________^ help: try: `self.source.as_ref().map(|x| x as _)`
error: aborting due to 3 previous errors
diff --git a/src/tools/clippy/tests/ui/match_expr_like_matches_macro.fixed b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.fixed
index 60f590661..f19149cf9 100644
--- a/src/tools/clippy/tests/ui/match_expr_like_matches_macro.fixed
+++ b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.fixed
@@ -5,7 +5,8 @@
unreachable_patterns,
dead_code,
clippy::equatable_if_let,
- clippy::needless_borrowed_reference
+ clippy::needless_borrowed_reference,
+ clippy::redundant_guards
)]
fn main() {
diff --git a/src/tools/clippy/tests/ui/match_expr_like_matches_macro.rs b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.rs
index afdf1069f..8f4e58981 100644
--- a/src/tools/clippy/tests/ui/match_expr_like_matches_macro.rs
+++ b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.rs
@@ -5,7 +5,8 @@
unreachable_patterns,
dead_code,
clippy::equatable_if_let,
- clippy::needless_borrowed_reference
+ clippy::needless_borrowed_reference,
+ clippy::redundant_guards
)]
fn main() {
diff --git a/src/tools/clippy/tests/ui/match_expr_like_matches_macro.stderr b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.stderr
index b72fe10b7..b57b26284 100644
--- a/src/tools/clippy/tests/ui/match_expr_like_matches_macro.stderr
+++ b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.stderr
@@ -1,55 +1,55 @@
error: match expression looks like `matches!` macro
- --> $DIR/match_expr_like_matches_macro.rs:15:14
+ --> $DIR/match_expr_like_matches_macro.rs:16:14
|
LL | let _y = match x {
| ______________^
LL | | Some(0) => true,
LL | | _ => false,
LL | | };
- | |_____^ help: try this: `matches!(x, Some(0))`
+ | |_____^ help: try: `matches!(x, Some(0))`
|
= note: `-D clippy::match-like-matches-macro` implied by `-D warnings`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/match_expr_like_matches_macro.rs:21:14
+ --> $DIR/match_expr_like_matches_macro.rs:22:14
|
LL | let _w = match x {
| ______________^
LL | | Some(_) => true,
LL | | _ => false,
LL | | };
- | |_____^ help: try this: `x.is_some()`
+ | |_____^ help: try: `x.is_some()`
|
= note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/match_expr_like_matches_macro.rs:27:14
+ --> $DIR/match_expr_like_matches_macro.rs:28:14
|
LL | let _z = match x {
| ______________^
LL | | Some(_) => false,
LL | | None => true,
LL | | };
- | |_____^ help: try this: `x.is_none()`
+ | |_____^ help: try: `x.is_none()`
error: match expression looks like `matches!` macro
- --> $DIR/match_expr_like_matches_macro.rs:33:15
+ --> $DIR/match_expr_like_matches_macro.rs:34:15
|
LL | let _zz = match x {
| _______________^
LL | | Some(r) if r == 0 => false,
LL | | _ => true,
LL | | };
- | |_____^ help: try this: `!matches!(x, Some(r) if r == 0)`
+ | |_____^ help: try: `!matches!(x, Some(r) if r == 0)`
error: if let .. else expression looks like `matches!` macro
- --> $DIR/match_expr_like_matches_macro.rs:39:16
+ --> $DIR/match_expr_like_matches_macro.rs:40:16
|
LL | let _zzz = if let Some(5) = x { true } else { false };
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `matches!(x, Some(5))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `matches!(x, Some(5))`
error: match expression looks like `matches!` macro
- --> $DIR/match_expr_like_matches_macro.rs:63:20
+ --> $DIR/match_expr_like_matches_macro.rs:64:20
|
LL | let _ans = match x {
| ____________________^
@@ -57,10 +57,10 @@ LL | | E::A(_) => true,
LL | | E::B(_) => true,
LL | | _ => false,
LL | | };
- | |_________^ help: try this: `matches!(x, E::A(_) | E::B(_))`
+ | |_________^ help: try: `matches!(x, E::A(_) | E::B(_))`
error: match expression looks like `matches!` macro
- --> $DIR/match_expr_like_matches_macro.rs:73:20
+ --> $DIR/match_expr_like_matches_macro.rs:74:20
|
LL | let _ans = match x {
| ____________________^
@@ -70,10 +70,10 @@ LL | | }
LL | | E::B(_) => true,
LL | | _ => false,
LL | | };
- | |_________^ help: try this: `matches!(x, E::A(_) | E::B(_))`
+ | |_________^ help: try: `matches!(x, E::A(_) | E::B(_))`
error: match expression looks like `matches!` macro
- --> $DIR/match_expr_like_matches_macro.rs:83:20
+ --> $DIR/match_expr_like_matches_macro.rs:84:20
|
LL | let _ans = match x {
| ____________________^
@@ -81,67 +81,67 @@ LL | | E::B(_) => false,
LL | | E::C => false,
LL | | _ => true,
LL | | };
- | |_________^ help: try this: `!matches!(x, E::B(_) | E::C)`
+ | |_________^ help: try: `!matches!(x, E::B(_) | E::C)`
error: match expression looks like `matches!` macro
- --> $DIR/match_expr_like_matches_macro.rs:143:18
+ --> $DIR/match_expr_like_matches_macro.rs:144:18
|
LL | let _z = match &z {
| __________________^
LL | | Some(3) => true,
LL | | _ => false,
LL | | };
- | |_________^ help: try this: `matches!(z, Some(3))`
+ | |_________^ help: try: `matches!(z, Some(3))`
error: match expression looks like `matches!` macro
- --> $DIR/match_expr_like_matches_macro.rs:152:18
+ --> $DIR/match_expr_like_matches_macro.rs:153:18
|
LL | let _z = match &z {
| __________________^
LL | | Some(3) => true,
LL | | _ => false,
LL | | };
- | |_________^ help: try this: `matches!(&z, Some(3))`
+ | |_________^ help: try: `matches!(&z, Some(3))`
error: match expression looks like `matches!` macro
- --> $DIR/match_expr_like_matches_macro.rs:169:21
+ --> $DIR/match_expr_like_matches_macro.rs:170:21
|
LL | let _ = match &z {
| _____________________^
LL | | AnEnum::X => true,
LL | | _ => false,
LL | | };
- | |_____________^ help: try this: `matches!(&z, AnEnum::X)`
+ | |_____________^ help: try: `matches!(&z, AnEnum::X)`
error: match expression looks like `matches!` macro
- --> $DIR/match_expr_like_matches_macro.rs:183:20
+ --> $DIR/match_expr_like_matches_macro.rs:184:20
|
LL | let _res = match &val {
| ____________________^
LL | | &Some(ref _a) => true,
LL | | _ => false,
LL | | };
- | |_________^ help: try this: `matches!(&val, &Some(ref _a))`
+ | |_________^ help: try: `matches!(&val, &Some(ref _a))`
error: match expression looks like `matches!` macro
- --> $DIR/match_expr_like_matches_macro.rs:195:20
+ --> $DIR/match_expr_like_matches_macro.rs:196:20
|
LL | let _res = match &val {
| ____________________^
LL | | &Some(ref _a) => true,
LL | | _ => false,
LL | | };
- | |_________^ help: try this: `matches!(&val, &Some(ref _a))`
+ | |_________^ help: try: `matches!(&val, &Some(ref _a))`
error: match expression looks like `matches!` macro
- --> $DIR/match_expr_like_matches_macro.rs:253:14
+ --> $DIR/match_expr_like_matches_macro.rs:254:14
|
LL | let _y = match Some(5) {
| ______________^
LL | | Some(0) => true,
LL | | _ => false,
LL | | };
- | |_____^ help: try this: `matches!(Some(5), Some(0))`
+ | |_____^ help: try: `matches!(Some(5), Some(0))`
error: aborting due to 14 previous errors
diff --git a/src/tools/clippy/tests/ui/match_on_vec_items.stderr b/src/tools/clippy/tests/ui/match_on_vec_items.stderr
index 9b1f05286..fc4a3ce19 100644
--- a/src/tools/clippy/tests/ui/match_on_vec_items.stderr
+++ b/src/tools/clippy/tests/ui/match_on_vec_items.stderr
@@ -2,7 +2,7 @@ error: indexing into a vector may panic
--> $DIR/match_on_vec_items.rs:10:11
|
LL | match arr[idx] {
- | ^^^^^^^^ help: try this: `arr.get(idx)`
+ | ^^^^^^^^ help: try: `arr.get(idx)`
|
= note: `-D clippy::match-on-vec-items` implied by `-D warnings`
@@ -10,43 +10,43 @@ error: indexing into a vector may panic
--> $DIR/match_on_vec_items.rs:17:11
|
LL | match arr[range] {
- | ^^^^^^^^^^ help: try this: `arr.get(range)`
+ | ^^^^^^^^^^ help: try: `arr.get(range)`
error: indexing into a vector may panic
--> $DIR/match_on_vec_items.rs:30:11
|
LL | match arr[idx] {
- | ^^^^^^^^ help: try this: `arr.get(idx)`
+ | ^^^^^^^^ help: try: `arr.get(idx)`
error: indexing into a vector may panic
--> $DIR/match_on_vec_items.rs:37:11
|
LL | match arr[range] {
- | ^^^^^^^^^^ help: try this: `arr.get(range)`
+ | ^^^^^^^^^^ help: try: `arr.get(range)`
error: indexing into a vector may panic
--> $DIR/match_on_vec_items.rs:50:11
|
LL | match arr[idx] {
- | ^^^^^^^^ help: try this: `arr.get(idx)`
+ | ^^^^^^^^ help: try: `arr.get(idx)`
error: indexing into a vector may panic
--> $DIR/match_on_vec_items.rs:57:11
|
LL | match arr[range] {
- | ^^^^^^^^^^ help: try this: `arr.get(range)`
+ | ^^^^^^^^^^ help: try: `arr.get(range)`
error: indexing into a vector may panic
--> $DIR/match_on_vec_items.rs:70:11
|
LL | match arr[idx] {
- | ^^^^^^^^ help: try this: `arr.get(idx)`
+ | ^^^^^^^^ help: try: `arr.get(idx)`
error: indexing into a vector may panic
--> $DIR/match_on_vec_items.rs:77:11
|
LL | match arr[range] {
- | ^^^^^^^^^^ help: try this: `arr.get(range)`
+ | ^^^^^^^^^^ help: try: `arr.get(range)`
error: aborting due to 8 previous errors
diff --git a/src/tools/clippy/tests/ui/match_ref_pats.stderr b/src/tools/clippy/tests/ui/match_ref_pats.stderr
index 7d9646c84..1294e0fe5 100644
--- a/src/tools/clippy/tests/ui/match_ref_pats.stderr
+++ b/src/tools/clippy/tests/ui/match_ref_pats.stderr
@@ -35,7 +35,7 @@ error: redundant pattern matching, consider using `is_none()`
--> $DIR/match_ref_pats.rs:38:12
|
LL | if let &None = a {
- | -------^^^^^---- help: try this: `if a.is_none()`
+ | -------^^^^^---- help: try: `if a.is_none()`
|
= note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
@@ -43,7 +43,7 @@ error: redundant pattern matching, consider using `is_none()`
--> $DIR/match_ref_pats.rs:43:12
|
LL | if let &None = &b {
- | -------^^^^^----- help: try this: `if b.is_none()`
+ | -------^^^^^----- help: try: `if b.is_none()`
error: you don't need to add `&` to all patterns
--> $DIR/match_ref_pats.rs:103:9
diff --git a/src/tools/clippy/tests/ui/match_same_arms2.stderr b/src/tools/clippy/tests/ui/match_same_arms2.stderr
index 7f0c70745..a73481875 100644
--- a/src/tools/clippy/tests/ui/match_same_arms2.stderr
+++ b/src/tools/clippy/tests/ui/match_same_arms2.stderr
@@ -144,7 +144,7 @@ LL | | E::A => false,
LL | | E::B => false,
LL | | _ => true,
LL | | };
- | |_____^ help: try this: `!matches!(x, E::A | E::B)`
+ | |_____^ help: try: `!matches!(x, E::A | E::B)`
|
= note: `-D clippy::match-like-matches-macro` implied by `-D warnings`
diff --git a/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.stderr b/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.stderr
index 105b4c4b4..40ff4fbd3 100644
--- a/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.stderr
+++ b/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.stderr
@@ -2,7 +2,7 @@ error: wildcard matches only a single variant and will also match any future add
--> $DIR/match_wildcard_for_single_variants.rs:24:13
|
LL | _ => (),
- | ^ help: try this: `Self::Rgb(..)`
+ | ^ help: try: `Self::Rgb(..)`
|
= note: `-D clippy::match-wildcard-for-single-variants` implied by `-D warnings`
@@ -10,55 +10,55 @@ error: wildcard matches only a single variant and will also match any future add
--> $DIR/match_wildcard_for_single_variants.rs:34:9
|
LL | _ => {},
- | ^ help: try this: `Foo::C`
+ | ^ help: try: `Foo::C`
error: wildcard matches only a single variant and will also match any future added variants
--> $DIR/match_wildcard_for_single_variants.rs:44:9
|
LL | _ => {},
- | ^ help: try this: `Color::Blue`
+ | ^ help: try: `Color::Blue`
error: wildcard matches only a single variant and will also match any future added variants
--> $DIR/match_wildcard_for_single_variants.rs:52:9
|
LL | _ => {},
- | ^ help: try this: `Color::Blue`
+ | ^ help: try: `Color::Blue`
error: wildcard matches only a single variant and will also match any future added variants
--> $DIR/match_wildcard_for_single_variants.rs:58:9
|
LL | _ => {},
- | ^ help: try this: `Color::Blue`
+ | ^ help: try: `Color::Blue`
error: wildcard matches only a single variant and will also match any future added variants
--> $DIR/match_wildcard_for_single_variants.rs:75:9
|
LL | &_ => (),
- | ^^ help: try this: `Color::Blue`
+ | ^^ help: try: `Color::Blue`
error: wildcard matches only a single variant and will also match any future added variants
--> $DIR/match_wildcard_for_single_variants.rs:84:9
|
LL | _ => (),
- | ^ help: try this: `C::Blue`
+ | ^ help: try: `C::Blue`
error: wildcard matches only a single variant and will also match any future added variants
--> $DIR/match_wildcard_for_single_variants.rs:91:9
|
LL | _ => (),
- | ^ help: try this: `Color::Blue`
+ | ^ help: try: `Color::Blue`
error: wildcard matches only a single variant and will also match any future added variants
--> $DIR/match_wildcard_for_single_variants.rs:126:13
|
LL | _ => (),
- | ^ help: try this: `Enum::__Private`
+ | ^ help: try: `Enum::__Private`
error: wildcard matches only a single variant and will also match any future added variants
--> $DIR/match_wildcard_for_single_variants.rs:153:13
|
LL | _ => 2,
- | ^ help: try this: `Foo::B`
+ | ^ help: try: `Foo::B`
error: aborting due to 10 previous errors
diff --git a/src/tools/clippy/tests/ui/methods.rs b/src/tools/clippy/tests/ui/methods.rs
index 589eab5cd..cb1f695c6 100644
--- a/src/tools/clippy/tests/ui/methods.rs
+++ b/src/tools/clippy/tests/ui/methods.rs
@@ -25,10 +25,7 @@
#[macro_use]
extern crate option_helpers;
-use std::collections::BTreeMap;
-use std::collections::HashMap;
-use std::collections::HashSet;
-use std::collections::VecDeque;
+use std::collections::{BTreeMap, HashMap, HashSet, VecDeque};
use std::ops::Mul;
use std::rc::{self, Rc};
use std::sync::{self, Arc};
diff --git a/src/tools/clippy/tests/ui/methods.stderr b/src/tools/clippy/tests/ui/methods.stderr
index 73ec48643..6be38b24f 100644
--- a/src/tools/clippy/tests/ui/methods.stderr
+++ b/src/tools/clippy/tests/ui/methods.stderr
@@ -1,5 +1,5 @@
error: methods called `new` usually return `Self`
- --> $DIR/methods.rs:106:5
+ --> $DIR/methods.rs:103:5
|
LL | / fn new() -> i32 {
LL | | 0
@@ -9,7 +9,7 @@ LL | | }
= note: `-D clippy::new-ret-no-self` implied by `-D warnings`
error: called `filter(..).next()` on an `Iterator`. This is more succinctly expressed by calling `.find(..)` instead
- --> $DIR/methods.rs:127:13
+ --> $DIR/methods.rs:124:13
|
LL | let _ = v.iter().filter(|&x| {
| _____________^
diff --git a/src/tools/clippy/tests/ui/methods_fixable.stderr b/src/tools/clippy/tests/ui/methods_fixable.stderr
index 187714c75..6f45d100d 100644
--- a/src/tools/clippy/tests/ui/methods_fixable.stderr
+++ b/src/tools/clippy/tests/ui/methods_fixable.stderr
@@ -2,7 +2,7 @@ error: called `filter(..).next()` on an `Iterator`. This is more succinctly expr
--> $DIR/methods_fixable.rs:11:13
|
LL | let _ = v.iter().filter(|&x| *x < 0).next();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `v.iter().find(|&x| *x < 0)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `v.iter().find(|&x| *x < 0)`
|
= note: `-D clippy::filter-next` implied by `-D warnings`
diff --git a/src/tools/clippy/tests/ui/methods_unfixable.rs b/src/tools/clippy/tests/ui/methods_unfixable.rs
new file mode 100644
index 000000000..3d88ce4b6
--- /dev/null
+++ b/src/tools/clippy/tests/ui/methods_unfixable.rs
@@ -0,0 +1,10 @@
+#![warn(clippy::filter_next)]
+
+fn main() {
+ issue10029();
+}
+
+pub fn issue10029() {
+ let iter = (0..10);
+ let _ = iter.filter(|_| true).next();
+}
diff --git a/src/tools/clippy/tests/ui/methods_unfixable.stderr b/src/tools/clippy/tests/ui/methods_unfixable.stderr
new file mode 100644
index 000000000..6e101fe16
--- /dev/null
+++ b/src/tools/clippy/tests/ui/methods_unfixable.stderr
@@ -0,0 +1,15 @@
+error: called `filter(..).next()` on an `Iterator`. This is more succinctly expressed by calling `.find(..)` instead
+ --> $DIR/methods_unfixable.rs:9:13
+ |
+LL | let _ = iter.filter(|_| true).next();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `iter.find(|_| true)`
+ |
+help: you will also need to make `iter` mutable, because `find` takes `&mut self`
+ --> $DIR/methods_unfixable.rs:8:9
+ |
+LL | let iter = (0..10);
+ | ^^^^
+ = note: `-D clippy::filter-next` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/min_ident_chars.rs b/src/tools/clippy/tests/ui/min_ident_chars.rs
index b5b9e66aa..03784442e 100644
--- a/src/tools/clippy/tests/ui/min_ident_chars.rs
+++ b/src/tools/clippy/tests/ui/min_ident_chars.rs
@@ -3,8 +3,7 @@
#![warn(clippy::min_ident_chars)]
extern crate proc_macros;
-use proc_macros::external;
-use proc_macros::with_span;
+use proc_macros::{external, with_span};
struct A {
a: u32,
@@ -82,3 +81,7 @@ fn b() {}
fn wrong_pythagoras(a: f32, b: f32) -> f32 {
a * a + a * b
}
+
+mod issue_11163 {
+ struct Array<T, const N: usize>([T; N]);
+}
diff --git a/src/tools/clippy/tests/ui/min_ident_chars.stderr b/src/tools/clippy/tests/ui/min_ident_chars.stderr
index 66a63f657..4dff6588b 100644
--- a/src/tools/clippy/tests/ui/min_ident_chars.stderr
+++ b/src/tools/clippy/tests/ui/min_ident_chars.stderr
@@ -1,5 +1,5 @@
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:9:8
+ --> $DIR/min_ident_chars.rs:8:8
|
LL | struct A {
| ^
@@ -7,169 +7,169 @@ LL | struct A {
= note: `-D clippy::min-ident-chars` implied by `-D warnings`
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:10:5
+ --> $DIR/min_ident_chars.rs:9:5
|
LL | a: u32,
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:12:5
+ --> $DIR/min_ident_chars.rs:11:5
|
LL | A: u32,
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:13:5
+ --> $DIR/min_ident_chars.rs:12:5
|
LL | I: u32,
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:16:8
+ --> $DIR/min_ident_chars.rs:15:8
|
LL | struct B(u32);
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:18:8
+ --> $DIR/min_ident_chars.rs:17:8
|
LL | struct O {
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:19:5
+ --> $DIR/min_ident_chars.rs:18:5
|
LL | o: u32,
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:24:6
+ --> $DIR/min_ident_chars.rs:23:6
|
LL | enum C {
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:25:5
+ --> $DIR/min_ident_chars.rs:24:5
|
LL | D,
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:26:5
+ --> $DIR/min_ident_chars.rs:25:5
|
LL | E,
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:27:5
+ --> $DIR/min_ident_chars.rs:26:5
|
LL | F,
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:51:9
+ --> $DIR/min_ident_chars.rs:50:9
|
LL | let h = 1;
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:52:9
+ --> $DIR/min_ident_chars.rs:51:9
|
LL | let e = 2;
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:53:9
+ --> $DIR/min_ident_chars.rs:52:9
|
LL | let l = 3;
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:54:9
+ --> $DIR/min_ident_chars.rs:53:9
|
LL | let l = 4;
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:55:9
+ --> $DIR/min_ident_chars.rs:54:9
|
LL | let o = 6;
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:59:10
+ --> $DIR/min_ident_chars.rs:58:10
|
LL | let (h, o, w) = (1, 2, 3);
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:59:13
+ --> $DIR/min_ident_chars.rs:58:13
|
LL | let (h, o, w) = (1, 2, 3);
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:60:10
+ --> $DIR/min_ident_chars.rs:59:10
|
LL | for (a, (r, e)) in (0..1000).enumerate().enumerate() {}
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:60:14
+ --> $DIR/min_ident_chars.rs:59:14
|
LL | for (a, (r, e)) in (0..1000).enumerate().enumerate() {}
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:60:17
+ --> $DIR/min_ident_chars.rs:59:17
|
LL | for (a, (r, e)) in (0..1000).enumerate().enumerate() {}
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:62:16
+ --> $DIR/min_ident_chars.rs:61:16
|
LL | while let (d, o, _i, n, g) = (true, true, false, false, true) {}
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:62:19
+ --> $DIR/min_ident_chars.rs:61:19
|
LL | while let (d, o, _i, n, g) = (true, true, false, false, true) {}
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:62:29
+ --> $DIR/min_ident_chars.rs:61:29
|
LL | while let (d, o, _i, n, g) = (true, true, false, false, true) {}
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:66:9
+ --> $DIR/min_ident_chars.rs:65:9
|
LL | let o = 1;
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:67:9
+ --> $DIR/min_ident_chars.rs:66:9
|
LL | let o = O { o };
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:81:4
+ --> $DIR/min_ident_chars.rs:80:4
|
LL | fn b() {}
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:82:21
+ --> $DIR/min_ident_chars.rs:81:21
|
LL | fn wrong_pythagoras(a: f32, b: f32) -> f32 {
| ^
error: this ident consists of a single char
- --> $DIR/min_ident_chars.rs:82:29
+ --> $DIR/min_ident_chars.rs:81:29
|
LL | fn wrong_pythagoras(a: f32, b: f32) -> f32 {
| ^
diff --git a/src/tools/clippy/tests/ui/min_max.rs b/src/tools/clippy/tests/ui/min_max.rs
index 24e52afd6..1215a0228 100644
--- a/src/tools/clippy/tests/ui/min_max.rs
+++ b/src/tools/clippy/tests/ui/min_max.rs
@@ -1,9 +1,7 @@
#![warn(clippy::all)]
#![allow(clippy::manual_clamp)]
-use std::cmp::max as my_max;
-use std::cmp::min as my_min;
-use std::cmp::{max, min};
+use std::cmp::{max as my_max, max, min as my_min, min};
const LARGE: usize = 3;
diff --git a/src/tools/clippy/tests/ui/min_max.stderr b/src/tools/clippy/tests/ui/min_max.stderr
index 069d90686..402b094f4 100644
--- a/src/tools/clippy/tests/ui/min_max.stderr
+++ b/src/tools/clippy/tests/ui/min_max.stderr
@@ -1,5 +1,5 @@
error: this `min`/`max` combination leads to constant result
- --> $DIR/min_max.rs:24:5
+ --> $DIR/min_max.rs:22:5
|
LL | min(1, max(3, x));
| ^^^^^^^^^^^^^^^^^
@@ -7,73 +7,73 @@ LL | min(1, max(3, x));
= note: `-D clippy::min-max` implied by `-D warnings`
error: this `min`/`max` combination leads to constant result
- --> $DIR/min_max.rs:25:5
+ --> $DIR/min_max.rs:23:5
|
LL | min(max(3, x), 1);
| ^^^^^^^^^^^^^^^^^
error: this `min`/`max` combination leads to constant result
- --> $DIR/min_max.rs:26:5
+ --> $DIR/min_max.rs:24:5
|
LL | max(min(x, 1), 3);
| ^^^^^^^^^^^^^^^^^
error: this `min`/`max` combination leads to constant result
- --> $DIR/min_max.rs:27:5
+ --> $DIR/min_max.rs:25:5
|
LL | max(3, min(x, 1));
| ^^^^^^^^^^^^^^^^^
error: this `min`/`max` combination leads to constant result
- --> $DIR/min_max.rs:29:5
+ --> $DIR/min_max.rs:27:5
|
LL | my_max(3, my_min(x, 1));
| ^^^^^^^^^^^^^^^^^^^^^^^
error: this `min`/`max` combination leads to constant result
- --> $DIR/min_max.rs:39:5
+ --> $DIR/min_max.rs:37:5
|
LL | min("Apple", max("Zoo", s));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: this `min`/`max` combination leads to constant result
- --> $DIR/min_max.rs:40:5
+ --> $DIR/min_max.rs:38:5
|
LL | max(min(s, "Apple"), "Zoo");
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: this `min`/`max` combination leads to constant result
- --> $DIR/min_max.rs:45:5
+ --> $DIR/min_max.rs:43:5
|
LL | x.min(1).max(3);
| ^^^^^^^^^^^^^^^
error: this `min`/`max` combination leads to constant result
- --> $DIR/min_max.rs:46:5
+ --> $DIR/min_max.rs:44:5
|
LL | x.max(3).min(1);
| ^^^^^^^^^^^^^^^
error: this `min`/`max` combination leads to constant result
- --> $DIR/min_max.rs:47:5
+ --> $DIR/min_max.rs:45:5
|
LL | f.max(3f32).min(1f32);
| ^^^^^^^^^^^^^^^^^^^^^
error: this `min`/`max` combination leads to constant result
- --> $DIR/min_max.rs:53:5
+ --> $DIR/min_max.rs:51:5
|
LL | max(x.min(1), 3);
| ^^^^^^^^^^^^^^^^
error: this `min`/`max` combination leads to constant result
- --> $DIR/min_max.rs:56:5
+ --> $DIR/min_max.rs:54:5
|
LL | s.max("Zoo").min("Apple");
| ^^^^^^^^^^^^^^^^^^^^^^^^^
error: this `min`/`max` combination leads to constant result
- --> $DIR/min_max.rs:57:5
+ --> $DIR/min_max.rs:55:5
|
LL | s.min("Apple").max("Zoo");
| ^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs
index b1980b1b5..3aaee67e1 100644
--- a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs
+++ b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs
@@ -99,4 +99,5 @@ impl const Drop for D {
}
// Lint this, since it can be dropped in const contexts
+// FIXME(effects)
fn d(this: D) {}
diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr
index 7be2cc0ca..66cf4e315 100644
--- a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr
+++ b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr
@@ -89,11 +89,5 @@ LL | | 46
LL | | }
| |_^
-error: this could be a `const fn`
- --> $DIR/could_be_const.rs:102:1
- |
-LL | fn d(this: D) {}
- | ^^^^^^^^^^^^^^^^
-
-error: aborting due to 12 previous errors
+error: aborting due to 11 previous errors
diff --git a/src/tools/clippy/tests/ui/missing_doc.rs b/src/tools/clippy/tests/ui/missing_doc.rs
index cff1706a8..83ebf09c8 100644
--- a/src/tools/clippy/tests/ui/missing_doc.rs
+++ b/src/tools/clippy/tests/ui/missing_doc.rs
@@ -96,10 +96,8 @@ mod internal_impl {
}
/// dox
pub mod public_interface {
- pub use crate::internal_impl::documented as foo;
pub use crate::internal_impl::globbed::*;
- pub use crate::internal_impl::undocumented1 as bar;
- pub use crate::internal_impl::{documented, undocumented2};
+ pub use crate::internal_impl::{documented as foo, documented, undocumented1 as bar, undocumented2};
}
fn main() {}
diff --git a/src/tools/clippy/tests/ui/missing_spin_loop.stderr b/src/tools/clippy/tests/ui/missing_spin_loop.stderr
index 485da00dc..5795c2c21 100644
--- a/src/tools/clippy/tests/ui/missing_spin_loop.stderr
+++ b/src/tools/clippy/tests/ui/missing_spin_loop.stderr
@@ -2,7 +2,7 @@ error: busy-waiting loop should at least have a spin loop hint
--> $DIR/missing_spin_loop.rs:11:37
|
LL | while b.load(Ordering::Acquire) {}
- | ^^ help: try this: `{ std::hint::spin_loop() }`
+ | ^^ help: try: `{ std::hint::spin_loop() }`
|
= note: `-D clippy::missing-spin-loop` implied by `-D warnings`
@@ -10,31 +10,31 @@ error: busy-waiting loop should at least have a spin loop hint
--> $DIR/missing_spin_loop.rs:13:37
|
LL | while !b.load(Ordering::SeqCst) {}
- | ^^ help: try this: `{ std::hint::spin_loop() }`
+ | ^^ help: try: `{ std::hint::spin_loop() }`
error: busy-waiting loop should at least have a spin loop hint
--> $DIR/missing_spin_loop.rs:15:46
|
LL | while b.load(Ordering::Acquire) == false {}
- | ^^ help: try this: `{ std::hint::spin_loop() }`
+ | ^^ help: try: `{ std::hint::spin_loop() }`
error: busy-waiting loop should at least have a spin loop hint
--> $DIR/missing_spin_loop.rs:17:49
|
LL | while { true == b.load(Ordering::Acquire) } {}
- | ^^ help: try this: `{ std::hint::spin_loop() }`
+ | ^^ help: try: `{ std::hint::spin_loop() }`
error: busy-waiting loop should at least have a spin loop hint
--> $DIR/missing_spin_loop.rs:19:93
|
LL | while b.compare_exchange(true, false, Ordering::Acquire, Ordering::Relaxed) != Ok(true) {}
- | ^^ help: try this: `{ std::hint::spin_loop() }`
+ | ^^ help: try: `{ std::hint::spin_loop() }`
error: busy-waiting loop should at least have a spin loop hint
--> $DIR/missing_spin_loop.rs:21:94
|
LL | while Ok(false) != b.compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed) {}
- | ^^ help: try this: `{ std::hint::spin_loop() }`
+ | ^^ help: try: `{ std::hint::spin_loop() }`
error: aborting due to 6 previous errors
diff --git a/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr b/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr
index 2b3b6873c..3322a7aae 100644
--- a/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr
+++ b/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr
@@ -2,7 +2,7 @@ error: busy-waiting loop should at least have a spin loop hint
--> $DIR/missing_spin_loop_no_std.rs:13:37
|
LL | while b.load(Ordering::Acquire) {}
- | ^^ help: try this: `{ core::hint::spin_loop() }`
+ | ^^ help: try: `{ core::hint::spin_loop() }`
|
= note: `-D clippy::missing-spin-loop` implied by `-D warnings`
diff --git a/src/tools/clippy/tests/ui/must_use_candidates.fixed b/src/tools/clippy/tests/ui/must_use_candidates.fixed
index 0c275504d..3ca20c07d 100644
--- a/src/tools/clippy/tests/ui/must_use_candidates.fixed
+++ b/src/tools/clippy/tests/ui/must_use_candidates.fixed
@@ -1,6 +1,11 @@
//@run-rustfix
#![feature(never_type)]
-#![allow(unused_mut, unused_tuple_struct_fields, clippy::redundant_allocation)]
+#![allow(
+ unused_mut,
+ unused_tuple_struct_fields,
+ clippy::redundant_allocation,
+ clippy::needless_pass_by_ref_mut
+)]
#![warn(clippy::must_use_candidate)]
use std::rc::Rc;
use std::sync::atomic::{AtomicBool, Ordering};
diff --git a/src/tools/clippy/tests/ui/must_use_candidates.rs b/src/tools/clippy/tests/ui/must_use_candidates.rs
index d1c926773..dc4e0118e 100644
--- a/src/tools/clippy/tests/ui/must_use_candidates.rs
+++ b/src/tools/clippy/tests/ui/must_use_candidates.rs
@@ -1,6 +1,11 @@
//@run-rustfix
#![feature(never_type)]
-#![allow(unused_mut, unused_tuple_struct_fields, clippy::redundant_allocation)]
+#![allow(
+ unused_mut,
+ unused_tuple_struct_fields,
+ clippy::redundant_allocation,
+ clippy::needless_pass_by_ref_mut
+)]
#![warn(clippy::must_use_candidate)]
use std::rc::Rc;
use std::sync::atomic::{AtomicBool, Ordering};
diff --git a/src/tools/clippy/tests/ui/must_use_candidates.stderr b/src/tools/clippy/tests/ui/must_use_candidates.stderr
index 0fa3849d0..5fb302ccb 100644
--- a/src/tools/clippy/tests/ui/must_use_candidates.stderr
+++ b/src/tools/clippy/tests/ui/must_use_candidates.stderr
@@ -1,5 +1,5 @@
error: this function could have a `#[must_use]` attribute
- --> $DIR/must_use_candidates.rs:12:1
+ --> $DIR/must_use_candidates.rs:17:1
|
LL | pub fn pure(i: u8) -> u8 {
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn pure(i: u8) -> u8`
@@ -7,25 +7,25 @@ LL | pub fn pure(i: u8) -> u8 {
= note: `-D clippy::must-use-candidate` implied by `-D warnings`
error: this method could have a `#[must_use]` attribute
- --> $DIR/must_use_candidates.rs:17:5
+ --> $DIR/must_use_candidates.rs:22:5
|
LL | pub fn inherent_pure(&self) -> u8 {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn inherent_pure(&self) -> u8`
error: this function could have a `#[must_use]` attribute
- --> $DIR/must_use_candidates.rs:48:1
+ --> $DIR/must_use_candidates.rs:53:1
|
LL | pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool`
error: this function could have a `#[must_use]` attribute
- --> $DIR/must_use_candidates.rs:60:1
+ --> $DIR/must_use_candidates.rs:65:1
|
LL | pub fn rcd(_x: Rc<u32>) -> bool {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn rcd(_x: Rc<u32>) -> bool`
error: this function could have a `#[must_use]` attribute
- --> $DIR/must_use_candidates.rs:68:1
+ --> $DIR/must_use_candidates.rs:73:1
|
LL | pub fn arcd(_x: Arc<u32>) -> bool {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn arcd(_x: Arc<u32>) -> bool`
diff --git a/src/tools/clippy/tests/ui/mut_from_ref.rs b/src/tools/clippy/tests/ui/mut_from_ref.rs
index 7de153305..8c0c23b65 100644
--- a/src/tools/clippy/tests/ui/mut_from_ref.rs
+++ b/src/tools/clippy/tests/ui/mut_from_ref.rs
@@ -1,4 +1,4 @@
-#![allow(unused, clippy::needless_lifetimes)]
+#![allow(unused, clippy::needless_lifetimes, clippy::needless_pass_by_ref_mut)]
#![warn(clippy::mut_from_ref)]
struct Foo;
diff --git a/src/tools/clippy/tests/ui/mut_key.rs b/src/tools/clippy/tests/ui/mut_key.rs
index 1c0ba6645..15d68c089 100644
--- a/src/tools/clippy/tests/ui/mut_key.rs
+++ b/src/tools/clippy/tests/ui/mut_key.rs
@@ -2,7 +2,8 @@ use std::cell::Cell;
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::hash::{Hash, Hasher};
use std::rc::Rc;
-use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};
+use std::sync::atomic::AtomicUsize;
+use std::sync::atomic::Ordering::Relaxed;
use std::sync::Arc;
struct Key(AtomicUsize);
diff --git a/src/tools/clippy/tests/ui/mut_key.stderr b/src/tools/clippy/tests/ui/mut_key.stderr
index 25dd029b1..95b9546bf 100644
--- a/src/tools/clippy/tests/ui/mut_key.stderr
+++ b/src/tools/clippy/tests/ui/mut_key.stderr
@@ -1,5 +1,5 @@
error: mutable key type
- --> $DIR/mut_key.rs:30:32
+ --> $DIR/mut_key.rs:31:32
|
LL | fn should_not_take_this_arg(m: &mut HashMap<Key, usize>, _n: usize) -> HashSet<Key> {
| ^^^^^^^^^^^^^^^^^^^^^^^^
@@ -7,97 +7,97 @@ LL | fn should_not_take_this_arg(m: &mut HashMap<Key, usize>, _n: usize) -> Hash
= note: `-D clippy::mutable-key-type` implied by `-D warnings`
error: mutable key type
- --> $DIR/mut_key.rs:30:72
+ --> $DIR/mut_key.rs:31:72
|
LL | fn should_not_take_this_arg(m: &mut HashMap<Key, usize>, _n: usize) -> HashSet<Key> {
| ^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:31:5
+ --> $DIR/mut_key.rs:32:5
|
LL | let _other: HashMap<Key, bool> = HashMap::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:58:22
+ --> $DIR/mut_key.rs:59:22
|
LL | fn tuples_bad<U>(_m: &mut HashMap<(Key, U), bool>) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:70:5
+ --> $DIR/mut_key.rs:71:5
|
LL | let _map = HashMap::<Cell<usize>, usize>::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:71:5
+ --> $DIR/mut_key.rs:72:5
|
LL | let _map = HashMap::<&mut Cell<usize>, usize>::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:72:5
+ --> $DIR/mut_key.rs:73:5
|
LL | let _map = HashMap::<&mut usize, usize>::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:74:5
+ --> $DIR/mut_key.rs:75:5
|
LL | let _map = HashMap::<Vec<Cell<usize>>, usize>::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:75:5
+ --> $DIR/mut_key.rs:76:5
|
LL | let _map = HashMap::<BTreeMap<Cell<usize>, ()>, usize>::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:76:5
+ --> $DIR/mut_key.rs:77:5
|
LL | let _map = HashMap::<BTreeMap<(), Cell<usize>>, usize>::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:77:5
+ --> $DIR/mut_key.rs:78:5
|
LL | let _map = HashMap::<BTreeSet<Cell<usize>>, usize>::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:78:5
+ --> $DIR/mut_key.rs:79:5
|
LL | let _map = HashMap::<Option<Cell<usize>>, usize>::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:79:5
+ --> $DIR/mut_key.rs:80:5
|
LL | let _map = HashMap::<Option<Vec<Cell<usize>>>, usize>::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:80:5
+ --> $DIR/mut_key.rs:81:5
|
LL | let _map = HashMap::<Result<&mut usize, ()>, usize>::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:82:5
+ --> $DIR/mut_key.rs:83:5
|
LL | let _map = HashMap::<Box<Cell<usize>>, usize>::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:83:5
+ --> $DIR/mut_key.rs:84:5
|
LL | let _map = HashMap::<Rc<Cell<usize>>, usize>::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: mutable key type
- --> $DIR/mut_key.rs:84:5
+ --> $DIR/mut_key.rs:85:5
|
LL | let _map = HashMap::<Arc<Cell<usize>>, usize>::new();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/mut_mut.rs b/src/tools/clippy/tests/ui/mut_mut.rs
index b72134283..fe7d53e8e 100644
--- a/src/tools/clippy/tests/ui/mut_mut.rs
+++ b/src/tools/clippy/tests/ui/mut_mut.rs
@@ -1,7 +1,12 @@
//@aux-build:proc_macros.rs:proc-macro
#![warn(clippy::mut_mut)]
#![allow(unused)]
-#![allow(clippy::no_effect, clippy::uninlined_format_args, clippy::unnecessary_operation)]
+#![allow(
+ clippy::no_effect,
+ clippy::uninlined_format_args,
+ clippy::unnecessary_operation,
+ clippy::needless_pass_by_ref_mut
+)]
extern crate proc_macros;
use proc_macros::{external, inline_macros};
diff --git a/src/tools/clippy/tests/ui/mut_mut.stderr b/src/tools/clippy/tests/ui/mut_mut.stderr
index 93b857eb2..58a1c4e68 100644
--- a/src/tools/clippy/tests/ui/mut_mut.stderr
+++ b/src/tools/clippy/tests/ui/mut_mut.stderr
@@ -1,5 +1,5 @@
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:9:11
+ --> $DIR/mut_mut.rs:14:11
|
LL | fn fun(x: &mut &mut u32) -> bool {
| ^^^^^^^^^^^^^
@@ -7,13 +7,13 @@ LL | fn fun(x: &mut &mut u32) -> bool {
= note: `-D clippy::mut-mut` implied by `-D warnings`
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:26:17
+ --> $DIR/mut_mut.rs:31:17
|
LL | let mut x = &mut &mut 1u32;
| ^^^^^^^^^^^^^^
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:41:25
+ --> $DIR/mut_mut.rs:46:25
|
LL | let mut z = inline!(&mut $(&mut 3u32));
| ^
@@ -21,37 +21,37 @@ LL | let mut z = inline!(&mut $(&mut 3u32));
= note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: this expression mutably borrows a mutable reference. Consider reborrowing
- --> $DIR/mut_mut.rs:28:21
+ --> $DIR/mut_mut.rs:33:21
|
LL | let mut y = &mut x;
| ^^^^^^
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:32:32
+ --> $DIR/mut_mut.rs:37:32
|
LL | let y: &mut &mut u32 = &mut &mut 2;
| ^^^^^^^^^^^
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:32:16
+ --> $DIR/mut_mut.rs:37:16
|
LL | let y: &mut &mut u32 = &mut &mut 2;
| ^^^^^^^^^^^^^
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:37:37
+ --> $DIR/mut_mut.rs:42:37
|
LL | let y: &mut &mut &mut u32 = &mut &mut &mut 2;
| ^^^^^^^^^^^^^^^^
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:37:16
+ --> $DIR/mut_mut.rs:42:16
|
LL | let y: &mut &mut &mut u32 = &mut &mut &mut 2;
| ^^^^^^^^^^^^^^^^^^
error: generally you want to avoid `&mut &mut _` if possible
- --> $DIR/mut_mut.rs:37:21
+ --> $DIR/mut_mut.rs:42:21
|
LL | let y: &mut &mut &mut u32 = &mut &mut &mut 2;
| ^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/mut_reference.rs b/src/tools/clippy/tests/ui/mut_reference.rs
index 73906121c..00661c51a 100644
--- a/src/tools/clippy/tests/ui/mut_reference.rs
+++ b/src/tools/clippy/tests/ui/mut_reference.rs
@@ -1,8 +1,21 @@
-#![allow(unused_variables)]
+#![allow(unused_variables, dead_code)]
fn takes_an_immutable_reference(a: &i32) {}
fn takes_a_mutable_reference(a: &mut i32) {}
+mod issue11268 {
+ macro_rules! x {
+ ($f:expr) => {
+ $f(&mut 1);
+ };
+ }
+
+ fn f() {
+ x!(super::takes_an_immutable_reference);
+ x!(super::takes_a_mutable_reference);
+ }
+}
+
struct MyStruct;
impl MyStruct {
diff --git a/src/tools/clippy/tests/ui/mut_reference.stderr b/src/tools/clippy/tests/ui/mut_reference.stderr
index 062d30b26..4ce1cfa4f 100644
--- a/src/tools/clippy/tests/ui/mut_reference.stderr
+++ b/src/tools/clippy/tests/ui/mut_reference.stderr
@@ -1,5 +1,5 @@
error: the function `takes_an_immutable_reference` doesn't need a mutable reference
- --> $DIR/mut_reference.rs:17:34
+ --> $DIR/mut_reference.rs:30:34
|
LL | takes_an_immutable_reference(&mut 42);
| ^^^^^^^
@@ -7,13 +7,13 @@ LL | takes_an_immutable_reference(&mut 42);
= note: `-D clippy::unnecessary-mut-passed` implied by `-D warnings`
error: the function `as_ptr` doesn't need a mutable reference
- --> $DIR/mut_reference.rs:19:12
+ --> $DIR/mut_reference.rs:32:12
|
LL | as_ptr(&mut 42);
| ^^^^^^^
error: the method `takes_an_immutable_reference` doesn't need a mutable reference
- --> $DIR/mut_reference.rs:23:44
+ --> $DIR/mut_reference.rs:36:44
|
LL | my_struct.takes_an_immutable_reference(&mut 42);
| ^^^^^^^
diff --git a/src/tools/clippy/tests/ui/needless_borrow_pat.stderr b/src/tools/clippy/tests/ui/needless_borrow_pat.stderr
index db3b52b88..2d9b8f159 100644
--- a/src/tools/clippy/tests/ui/needless_borrow_pat.stderr
+++ b/src/tools/clippy/tests/ui/needless_borrow_pat.stderr
@@ -2,7 +2,7 @@ error: this pattern creates a reference to a reference
--> $DIR/needless_borrow_pat.rs:59:14
|
LL | Some(ref x) => x,
- | ^^^^^ help: try this: `x`
+ | ^^^^^ help: try: `x`
|
= note: `-D clippy::needless-borrow` implied by `-D warnings`
@@ -12,7 +12,7 @@ error: this pattern creates a reference to a reference
LL | Some(ref x) => *x,
| ^^^^^
|
-help: try this
+help: try
|
LL | Some(x) => x,
| ~ ~
@@ -23,7 +23,7 @@ error: this pattern creates a reference to a reference
LL | Some(ref x) => {
| ^^^^^
|
-help: try this
+help: try
|
LL ~ Some(x) => {
LL | f1(x);
@@ -34,13 +34,13 @@ error: this pattern creates a reference to a reference
--> $DIR/needless_borrow_pat.rs:81:14
|
LL | Some(ref x) => m1!(x),
- | ^^^^^ help: try this: `x`
+ | ^^^^^ help: try: `x`
error: this pattern creates a reference to a reference
--> $DIR/needless_borrow_pat.rs:86:15
|
LL | let _ = |&ref x: &&String| {
- | ^^^^^ help: try this: `x`
+ | ^^^^^ help: try: `x`
error: this pattern creates a reference to a reference
--> $DIR/needless_borrow_pat.rs:91:10
@@ -48,7 +48,7 @@ error: this pattern creates a reference to a reference
LL | let (ref y,) = (&x,);
| ^^^^^
|
-help: try this
+help: try
|
LL ~ let (y,) = (&x,);
LL ~ let _: &String = y;
@@ -58,7 +58,7 @@ error: this pattern creates a reference to a reference
--> $DIR/needless_borrow_pat.rs:101:14
|
LL | Some(ref x) => x.0,
- | ^^^^^ help: try this: `x`
+ | ^^^^^ help: try: `x`
error: this pattern creates a reference to a reference
--> $DIR/needless_borrow_pat.rs:111:14
@@ -66,7 +66,7 @@ error: this pattern creates a reference to a reference
LL | E::A(ref x) | E::B(ref x) => *x,
| ^^^^^ ^^^^^
|
-help: try this
+help: try
|
LL | E::A(x) | E::B(x) => x,
| ~ ~ ~
@@ -75,7 +75,7 @@ error: this pattern creates a reference to a reference
--> $DIR/needless_borrow_pat.rs:117:21
|
LL | if let Some(ref x) = Some(&String::new());
- | ^^^^^ help: try this: `x`
+ | ^^^^^ help: try: `x`
error: this pattern creates a reference to a reference
--> $DIR/needless_borrow_pat.rs:125:12
@@ -83,7 +83,7 @@ error: this pattern creates a reference to a reference
LL | fn f2<'a>(&ref x: &&'a String) -> &'a String {
| ^^^^^
|
-help: try this
+help: try
|
LL ~ fn f2<'a>(&x: &&'a String) -> &'a String {
LL | let _: &String = x;
@@ -94,7 +94,7 @@ error: this pattern creates a reference to a reference
--> $DIR/needless_borrow_pat.rs:132:11
|
LL | fn f(&ref x: &&String) {
- | ^^^^^ help: try this: `x`
+ | ^^^^^ help: try: `x`
error: this pattern creates a reference to a reference
--> $DIR/needless_borrow_pat.rs:140:11
@@ -102,7 +102,7 @@ error: this pattern creates a reference to a reference
LL | fn f(&ref x: &&String) {
| ^^^^^
|
-help: try this
+help: try
|
LL ~ fn f(&x: &&String) {
LL ~ let _: &String = x;
diff --git a/src/tools/clippy/tests/ui/needless_else.stderr b/src/tools/clippy/tests/ui/needless_else.stderr
index ea6930851..49cd78501 100644
--- a/src/tools/clippy/tests/ui/needless_else.stderr
+++ b/src/tools/clippy/tests/ui/needless_else.stderr
@@ -1,4 +1,4 @@
-error: this else branch is empty
+error: this `else` branch is empty
--> $DIR/needless_else.rs:24:7
|
LL | } else {
diff --git a/src/tools/clippy/tests/ui/needless_if.fixed b/src/tools/clippy/tests/ui/needless_if.fixed
index 5e6e140c2..6001c9e93 100644
--- a/src/tools/clippy/tests/ui/needless_if.fixed
+++ b/src/tools/clippy/tests/ui/needless_if.fixed
@@ -16,8 +16,7 @@
#![warn(clippy::needless_if)]
extern crate proc_macros;
-use proc_macros::external;
-use proc_macros::with_span;
+use proc_macros::{external, with_span};
fn maybe_side_effect() -> bool {
true
diff --git a/src/tools/clippy/tests/ui/needless_if.rs b/src/tools/clippy/tests/ui/needless_if.rs
index eb28ce73b..c6be4766d 100644
--- a/src/tools/clippy/tests/ui/needless_if.rs
+++ b/src/tools/clippy/tests/ui/needless_if.rs
@@ -16,8 +16,7 @@
#![warn(clippy::needless_if)]
extern crate proc_macros;
-use proc_macros::external;
-use proc_macros::with_span;
+use proc_macros::{external, with_span};
fn maybe_side_effect() -> bool {
true
diff --git a/src/tools/clippy/tests/ui/needless_if.stderr b/src/tools/clippy/tests/ui/needless_if.stderr
index 5cb42c369..14de40095 100644
--- a/src/tools/clippy/tests/ui/needless_if.stderr
+++ b/src/tools/clippy/tests/ui/needless_if.stderr
@@ -1,5 +1,5 @@
error: this `if` branch is empty
- --> $DIR/needless_if.rs:28:5
+ --> $DIR/needless_if.rs:27:5
|
LL | if (true) {}
| ^^^^^^^^^^^^ help: you can remove it
@@ -7,13 +7,13 @@ LL | if (true) {}
= note: `-D clippy::needless-if` implied by `-D warnings`
error: this `if` branch is empty
- --> $DIR/needless_if.rs:30:5
+ --> $DIR/needless_if.rs:29:5
|
LL | if maybe_side_effect() {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can remove it: `maybe_side_effect();`
error: this `if` branch is empty
- --> $DIR/needless_if.rs:35:5
+ --> $DIR/needless_if.rs:34:5
|
LL | / if {
LL | | return;
@@ -28,7 +28,7 @@ LL + });
|
error: this `if` branch is empty
- --> $DIR/needless_if.rs:47:5
+ --> $DIR/needless_if.rs:46:5
|
LL | / if {
LL | | if let true = true && true { true } else { false }
@@ -44,19 +44,19 @@ LL + } && true);
|
error: this `if` branch is empty
- --> $DIR/needless_if.rs:85:5
+ --> $DIR/needless_if.rs:84:5
|
LL | if { maybe_side_effect() } {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can remove it: `({ maybe_side_effect() });`
error: this `if` branch is empty
- --> $DIR/needless_if.rs:87:5
+ --> $DIR/needless_if.rs:86:5
|
LL | if { maybe_side_effect() } && true {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can remove it: `({ maybe_side_effect() } && true);`
error: this `if` branch is empty
- --> $DIR/needless_if.rs:91:5
+ --> $DIR/needless_if.rs:90:5
|
LL | if true {}
| ^^^^^^^^^^ help: you can remove it: `true;`
diff --git a/src/tools/clippy/tests/ui/needless_option_as_deref.stderr b/src/tools/clippy/tests/ui/needless_option_as_deref.stderr
index 20d28a968..4c0d502a2 100644
--- a/src/tools/clippy/tests/ui/needless_option_as_deref.stderr
+++ b/src/tools/clippy/tests/ui/needless_option_as_deref.stderr
@@ -2,7 +2,7 @@ error: derefed type is same as origin
--> $DIR/needless_option_as_deref.rs:9:29
|
LL | let _: Option<&usize> = Some(&1).as_deref();
- | ^^^^^^^^^^^^^^^^^^^ help: try this: `Some(&1)`
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `Some(&1)`
|
= note: `-D clippy::needless-option-as-deref` implied by `-D warnings`
@@ -10,13 +10,13 @@ error: derefed type is same as origin
--> $DIR/needless_option_as_deref.rs:10:33
|
LL | let _: Option<&mut usize> = Some(&mut 1).as_deref_mut();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `Some(&mut 1)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Some(&mut 1)`
error: derefed type is same as origin
--> $DIR/needless_option_as_deref.rs:14:13
|
LL | let _ = x.as_deref_mut();
- | ^^^^^^^^^^^^^^^^ help: try this: `x`
+ | ^^^^^^^^^^^^^^^^ help: try: `x`
error: aborting due to 3 previous errors
diff --git a/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs
new file mode 100644
index 000000000..4e79e5f53
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs
@@ -0,0 +1,254 @@
+#![allow(clippy::if_same_then_else, clippy::no_effect)]
+#![feature(lint_reasons)]
+
+// just ignore everywhere for now
+//@ignore-32bit
+//@ignore-64bit
+
+use std::ptr::NonNull;
+
+fn foo(s: &mut Vec<u32>, b: &u32, x: &mut u32) {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ *x += *b + s.len() as u32;
+}
+
+// Should not warn.
+fn foo2(s: &mut Vec<u32>) {
+ s.push(8);
+}
+
+// Should not warn because we return it.
+fn foo3(s: &mut Vec<u32>) -> &mut Vec<u32> {
+ s
+}
+
+// Should not warn because `s` is used as mutable.
+fn foo4(s: &mut Vec<u32>) {
+ Vec::push(s, 4);
+}
+
+// Should not warn.
+fn foo5(s: &mut Vec<u32>) {
+ foo2(s);
+}
+
+fn foo6(s: &mut Vec<u32>) {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ non_mut_ref(s);
+}
+
+fn non_mut_ref(_: &Vec<u32>) {}
+
+struct Bar;
+
+impl Bar {
+ // Should not warn on `&mut self`.
+ fn bar(&mut self) {}
+
+ fn mushroom(&self, vec: &mut Vec<i32>) -> usize {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ vec.len()
+ }
+
+ fn badger(&mut self, vec: &mut Vec<i32>) -> usize {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ vec.len()
+ }
+}
+
+trait Babar {
+ // Should not warn here since it's a trait method.
+ fn method(arg: &mut u32);
+}
+
+impl Babar for Bar {
+ // Should not warn here since it's a trait method.
+ fn method(a: &mut u32) {}
+}
+
+// Should not warn (checking variable aliasing).
+fn alias_check(s: &mut Vec<u32>) {
+ let mut alias = s;
+ let mut alias2 = alias;
+ let mut alias3 = alias2;
+ alias3.push(0);
+}
+
+// Should not warn (checking variable aliasing).
+fn alias_check2(mut s: &mut Vec<u32>) {
+ let mut alias = &mut s;
+ alias.push(0);
+}
+
+struct Mut<T> {
+ ptr: NonNull<T>,
+}
+
+impl<T> Mut<T> {
+ // Should not warn because `NonNull::from` also accepts `&mut`.
+ fn new(ptr: &mut T) -> Self {
+ Mut {
+ ptr: NonNull::from(ptr),
+ }
+ }
+}
+
+// Should not warn.
+fn unused(_: &mut u32, _b: &mut u8) {}
+
+// Should not warn.
+async fn f1(x: &mut i32) {
+ *x += 1;
+}
+// Should not warn.
+async fn f2(x: &mut i32, y: String) {
+ *x += 1;
+}
+// Should not warn.
+async fn f3(x: &mut i32, y: String, z: String) {
+ *x += 1;
+}
+// Should not warn.
+async fn f4(x: &mut i32, y: i32) {
+ *x += 1;
+}
+// Should not warn.
+async fn f5(x: i32, y: &mut i32) {
+ *y += 1;
+}
+// Should not warn.
+async fn f6(x: i32, y: &mut i32, z: &mut i32) {
+ *y += 1;
+ *z += 1;
+}
+// Should not warn.
+async fn f7(x: &mut i32, y: i32, z: &mut i32, a: i32) {
+ *x += 1;
+ *z += 1;
+}
+
+async fn a1(x: &mut i32) {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ println!("{:?}", x);
+}
+async fn a2(x: &mut i32, y: String) {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ println!("{:?}", x);
+}
+async fn a3(x: &mut i32, y: String, z: String) {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ println!("{:?}", x);
+}
+async fn a4(x: &mut i32, y: i32) {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ println!("{:?}", x);
+}
+async fn a5(x: i32, y: &mut i32) {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ println!("{:?}", x);
+}
+async fn a6(x: i32, y: &mut i32) {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ println!("{:?}", x);
+}
+async fn a7(x: i32, y: i32, z: &mut i32) {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ println!("{:?}", z);
+}
+async fn a8(x: i32, a: &mut i32, y: i32, z: &mut i32) {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ println!("{:?}", z);
+}
+
+// Should not warn (passed as closure which takes `&mut`).
+fn passed_as_closure(s: &mut u32) {}
+
+// Should not warn.
+fn passed_as_local(s: &mut u32) {}
+
+// Should not warn.
+fn ty_unify_1(s: &mut u32) {}
+
+// Should not warn.
+fn ty_unify_2(s: &mut u32) {}
+
+// Should not warn.
+fn passed_as_field(s: &mut u32) {}
+
+fn closure_takes_mut(s: fn(&mut u32)) {}
+
+struct A {
+ s: fn(&mut u32),
+}
+
+// Should warn.
+fn used_as_path(s: &mut u32) {}
+
+// Make sure lint attributes work fine
+#[expect(clippy::needless_pass_by_ref_mut)]
+fn lint_attr(s: &mut u32) {}
+
+#[cfg(not(feature = "a"))]
+fn cfg_warn(s: &mut u32) {}
+//~^ ERROR: this argument is a mutable reference, but not used mutably
+//~| NOTE: this is cfg-gated and may require further changes
+
+#[cfg(not(feature = "a"))]
+mod foo {
+ fn cfg_warn(s: &mut u32) {}
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ //~| NOTE: this is cfg-gated and may require further changes
+}
+
+// Should not warn.
+async fn inner_async(x: &mut i32, y: &mut u32) {
+ async {
+ *y += 1;
+ *x += 1;
+ }
+ .await;
+}
+
+async fn inner_async2(x: &mut i32, y: &mut u32) {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ async {
+ *x += 1;
+ }
+ .await;
+}
+
+async fn inner_async3(x: &mut i32, y: &mut u32) {
+ //~^ ERROR: this argument is a mutable reference, but not used mutably
+ async {
+ *y += 1;
+ }
+ .await;
+}
+
+// Should not warn.
+async fn async_vec(b: &mut Vec<bool>) {
+ b.append(&mut vec![]);
+}
+
+// Should not warn.
+async fn async_vec2(b: &mut Vec<bool>) {
+ b.push(true);
+}
+
+fn main() {
+ let mut u = 0;
+ let mut v = vec![0];
+ foo(&mut v, &0, &mut u);
+ foo2(&mut v);
+ foo3(&mut v);
+ foo4(&mut v);
+ foo5(&mut v);
+ alias_check(&mut v);
+ alias_check2(&mut v);
+ println!("{u}");
+ closure_takes_mut(passed_as_closure);
+ A { s: passed_as_field };
+ used_as_path;
+ let _: fn(&mut u32) = passed_as_local;
+ let _ = if v[0] == 0 { ty_unify_1 } else { ty_unify_2 };
+}
diff --git a/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr
new file mode 100644
index 000000000..2e06e7252
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr
@@ -0,0 +1,110 @@
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:6:11
+ |
+LL | fn foo(s: &mut Vec<u32>, b: &u32, x: &mut u32) {
+ | ^^^^^^^^^^^^^ help: consider changing to: `&Vec<u32>`
+ |
+ = note: `-D clippy::needless-pass-by-ref-mut` implied by `-D warnings`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:31:12
+ |
+LL | fn foo6(s: &mut Vec<u32>) {
+ | ^^^^^^^^^^^^^ help: consider changing to: `&Vec<u32>`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:44:29
+ |
+LL | fn mushroom(&self, vec: &mut Vec<i32>) -> usize {
+ | ^^^^^^^^^^^^^ help: consider changing to: `&Vec<i32>`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:49:31
+ |
+LL | fn badger(&mut self, vec: &mut Vec<i32>) -> usize {
+ | ^^^^^^^^^^^^^ help: consider changing to: `&Vec<i32>`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:126:16
+ |
+LL | async fn a1(x: &mut i32) {
+ | ^^^^^^^^ help: consider changing to: `&i32`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:130:16
+ |
+LL | async fn a2(x: &mut i32, y: String) {
+ | ^^^^^^^^ help: consider changing to: `&i32`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:134:16
+ |
+LL | async fn a3(x: &mut i32, y: String, z: String) {
+ | ^^^^^^^^ help: consider changing to: `&i32`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:138:16
+ |
+LL | async fn a4(x: &mut i32, y: i32) {
+ | ^^^^^^^^ help: consider changing to: `&i32`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:142:24
+ |
+LL | async fn a5(x: i32, y: &mut i32) {
+ | ^^^^^^^^ help: consider changing to: `&i32`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:146:24
+ |
+LL | async fn a6(x: i32, y: &mut i32) {
+ | ^^^^^^^^ help: consider changing to: `&i32`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:150:32
+ |
+LL | async fn a7(x: i32, y: i32, z: &mut i32) {
+ | ^^^^^^^^ help: consider changing to: `&i32`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:154:24
+ |
+LL | async fn a8(x: i32, a: &mut i32, y: i32, z: &mut i32) {
+ | ^^^^^^^^ help: consider changing to: `&i32`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:154:45
+ |
+LL | async fn a8(x: i32, a: &mut i32, y: i32, z: &mut i32) {
+ | ^^^^^^^^ help: consider changing to: `&i32`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:188:16
+ |
+LL | fn cfg_warn(s: &mut u32) {}
+ | ^^^^^^^^ help: consider changing to: `&u32`
+ |
+ = note: this is cfg-gated and may require further changes
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:194:20
+ |
+LL | fn cfg_warn(s: &mut u32) {}
+ | ^^^^^^^^ help: consider changing to: `&u32`
+ |
+ = note: this is cfg-gated and may require further changes
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:208:39
+ |
+LL | async fn inner_async2(x: &mut i32, y: &mut u32) {
+ | ^^^^^^^^ help: consider changing to: `&u32`
+
+error: this argument is a mutable reference, but not used mutably
+ --> $DIR/needless_pass_by_ref_mut.rs:216:26
+ |
+LL | async fn inner_async3(x: &mut i32, y: &mut u32) {
+ | ^^^^^^^^ help: consider changing to: `&i32`
+
+error: aborting due to 17 previous errors
+
diff --git a/src/tools/clippy/tests/ui/needless_return_with_question_mark.fixed b/src/tools/clippy/tests/ui/needless_return_with_question_mark.fixed
new file mode 100644
index 000000000..d6e47d07b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_return_with_question_mark.fixed
@@ -0,0 +1,40 @@
+//@run-rustfix
+//@aux-build:proc_macros.rs:proc-macro
+#![allow(
+ clippy::needless_return,
+ clippy::no_effect,
+ clippy::unit_arg,
+ clippy::useless_conversion,
+ unused
+)]
+
+#[macro_use]
+extern crate proc_macros;
+
+fn a() -> u32 {
+ return 0;
+}
+
+fn b() -> Result<u32, u32> {
+ return Err(0);
+}
+
+// Do not lint
+fn c() -> Option<()> {
+ return None?;
+}
+
+fn main() -> Result<(), ()> {
+ Err(())?;
+ return Ok::<(), ()>(());
+ Err(())?;
+ Ok::<(), ()>(());
+ return Err(().into());
+ external! {
+ return Err(())?;
+ }
+ with_span! {
+ return Err(())?;
+ }
+ Err(())
+}
diff --git a/src/tools/clippy/tests/ui/needless_return_with_question_mark.rs b/src/tools/clippy/tests/ui/needless_return_with_question_mark.rs
new file mode 100644
index 000000000..4fc04d363
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_return_with_question_mark.rs
@@ -0,0 +1,40 @@
+//@run-rustfix
+//@aux-build:proc_macros.rs:proc-macro
+#![allow(
+ clippy::needless_return,
+ clippy::no_effect,
+ clippy::unit_arg,
+ clippy::useless_conversion,
+ unused
+)]
+
+#[macro_use]
+extern crate proc_macros;
+
+fn a() -> u32 {
+ return 0;
+}
+
+fn b() -> Result<u32, u32> {
+ return Err(0);
+}
+
+// Do not lint
+fn c() -> Option<()> {
+ return None?;
+}
+
+fn main() -> Result<(), ()> {
+ return Err(())?;
+ return Ok::<(), ()>(());
+ Err(())?;
+ Ok::<(), ()>(());
+ return Err(().into());
+ external! {
+ return Err(())?;
+ }
+ with_span! {
+ return Err(())?;
+ }
+ Err(())
+}
diff --git a/src/tools/clippy/tests/ui/needless_return_with_question_mark.stderr b/src/tools/clippy/tests/ui/needless_return_with_question_mark.stderr
new file mode 100644
index 000000000..e1d91638d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/needless_return_with_question_mark.stderr
@@ -0,0 +1,10 @@
+error: unneeded `return` statement with `?` operator
+ --> $DIR/needless_return_with_question_mark.rs:28:5
+ |
+LL | return Err(())?;
+ | ^^^^^^^ help: remove it
+ |
+ = note: `-D clippy::needless-return-with-question-mark` implied by `-D warnings`
+
+error: aborting due to previous error
+
diff --git a/src/tools/clippy/tests/ui/needless_splitn.stderr b/src/tools/clippy/tests/ui/needless_splitn.stderr
index f607d8e1a..0005f7581 100644
--- a/src/tools/clippy/tests/ui/needless_splitn.stderr
+++ b/src/tools/clippy/tests/ui/needless_splitn.stderr
@@ -2,7 +2,7 @@ error: unnecessary use of `splitn`
--> $DIR/needless_splitn.rs:14:13
|
LL | let _ = str.splitn(2, '=').next();
- | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')`
+ | ^^^^^^^^^^^^^^^^^^ help: try: `str.split('=')`
|
= note: `-D clippy::needless-splitn` implied by `-D warnings`
@@ -10,73 +10,73 @@ error: unnecessary use of `splitn`
--> $DIR/needless_splitn.rs:15:13
|
LL | let _ = str.splitn(2, '=').nth(0);
- | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')`
+ | ^^^^^^^^^^^^^^^^^^ help: try: `str.split('=')`
error: unnecessary use of `splitn`
--> $DIR/needless_splitn.rs:18:18
|
LL | let (_, _) = str.splitn(3, '=').next_tuple().unwrap();
- | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')`
+ | ^^^^^^^^^^^^^^^^^^ help: try: `str.split('=')`
error: unnecessary use of `rsplitn`
--> $DIR/needless_splitn.rs:21:13
|
LL | let _ = str.rsplitn(2, '=').next();
- | ^^^^^^^^^^^^^^^^^^^ help: try this: `str.rsplit('=')`
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `str.rsplit('=')`
error: unnecessary use of `rsplitn`
--> $DIR/needless_splitn.rs:22:13
|
LL | let _ = str.rsplitn(2, '=').nth(0);
- | ^^^^^^^^^^^^^^^^^^^ help: try this: `str.rsplit('=')`
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `str.rsplit('=')`
error: unnecessary use of `rsplitn`
--> $DIR/needless_splitn.rs:25:18
|
LL | let (_, _) = str.rsplitn(3, '=').next_tuple().unwrap();
- | ^^^^^^^^^^^^^^^^^^^ help: try this: `str.rsplit('=')`
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `str.rsplit('=')`
error: unnecessary use of `splitn`
--> $DIR/needless_splitn.rs:27:13
|
LL | let _ = str.splitn(5, '=').next();
- | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')`
+ | ^^^^^^^^^^^^^^^^^^ help: try: `str.split('=')`
error: unnecessary use of `splitn`
--> $DIR/needless_splitn.rs:28:13
|
LL | let _ = str.splitn(5, '=').nth(3);
- | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')`
+ | ^^^^^^^^^^^^^^^^^^ help: try: `str.split('=')`
error: unnecessary use of `splitn`
--> $DIR/needless_splitn.rs:34:13
|
LL | let _ = s.splitn(2, '=').next()?;
- | ^^^^^^^^^^^^^^^^ help: try this: `s.split('=')`
+ | ^^^^^^^^^^^^^^^^ help: try: `s.split('=')`
error: unnecessary use of `splitn`
--> $DIR/needless_splitn.rs:35:13
|
LL | let _ = s.splitn(2, '=').nth(0)?;
- | ^^^^^^^^^^^^^^^^ help: try this: `s.split('=')`
+ | ^^^^^^^^^^^^^^^^ help: try: `s.split('=')`
error: unnecessary use of `rsplitn`
--> $DIR/needless_splitn.rs:36:13
|
LL | let _ = s.rsplitn(2, '=').next()?;
- | ^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit('=')`
+ | ^^^^^^^^^^^^^^^^^ help: try: `s.rsplit('=')`
error: unnecessary use of `rsplitn`
--> $DIR/needless_splitn.rs:37:13
|
LL | let _ = s.rsplitn(2, '=').nth(0)?;
- | ^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit('=')`
+ | ^^^^^^^^^^^^^^^^^ help: try: `s.rsplit('=')`
error: unnecessary use of `splitn`
--> $DIR/needless_splitn.rs:45:13
|
LL | let _ = "key=value".splitn(2, '=').nth(0).unwrap();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split('=')`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split('=')`
error: aborting due to 13 previous errors
diff --git a/src/tools/clippy/tests/ui/numbered_fields.stderr b/src/tools/clippy/tests/ui/numbered_fields.stderr
index 60c0d7898..26f7ad904 100644
--- a/src/tools/clippy/tests/ui/numbered_fields.stderr
+++ b/src/tools/clippy/tests/ui/numbered_fields.stderr
@@ -7,7 +7,7 @@ LL | | 0: 1u32,
LL | | 1: 42,
LL | | 2: 23u8,
LL | | };
- | |_____^ help: try this instead: `TupleStruct(1u32, 42, 23u8)`
+ | |_____^ help: try: `TupleStruct(1u32, 42, 23u8)`
|
= note: `-D clippy::init-numbered-fields` implied by `-D warnings`
@@ -20,7 +20,7 @@ LL | | 0: 1u32,
LL | | 2: 2u8,
LL | | 1: 3u32,
LL | | };
- | |_____^ help: try this instead: `TupleStruct(1u32, 3u32, 2u8)`
+ | |_____^ help: try: `TupleStruct(1u32, 3u32, 2u8)`
error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui/option_env_unwrap.rs b/src/tools/clippy/tests/ui/option_env_unwrap.rs
index 65a1b467f..61dbad939 100644
--- a/src/tools/clippy/tests/ui/option_env_unwrap.rs
+++ b/src/tools/clippy/tests/ui/option_env_unwrap.rs
@@ -9,6 +9,7 @@ use proc_macros::{external, inline_macros};
fn main() {
let _ = option_env!("PATH").unwrap();
let _ = option_env!("PATH").expect("environment variable PATH isn't set");
+ let _ = option_env!("__Y__do_not_use").unwrap(); // This test only works if you don't have a __Y__do_not_use env variable in your environment.
let _ = inline!(option_env!($"PATH").unwrap());
let _ = inline!(option_env!($"PATH").expect($"environment variable PATH isn't set"));
let _ = external!(option_env!($"PATH").unwrap());
diff --git a/src/tools/clippy/tests/ui/option_env_unwrap.stderr b/src/tools/clippy/tests/ui/option_env_unwrap.stderr
index 7bba62686..cfa9dd58a 100644
--- a/src/tools/clippy/tests/ui/option_env_unwrap.stderr
+++ b/src/tools/clippy/tests/ui/option_env_unwrap.stderr
@@ -16,7 +16,15 @@ LL | let _ = option_env!("PATH").expect("environment variable PATH isn't set
= help: consider using the `env!` macro instead
error: this will panic at run-time if the environment variable doesn't exist at compile-time
- --> $DIR/option_env_unwrap.rs:12:21
+ --> $DIR/option_env_unwrap.rs:12:13
+ |
+LL | let _ = option_env!("__Y__do_not_use").unwrap(); // This test only works if you don't have a __Y__do_not_use env variable in your env...
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider using the `env!` macro instead
+
+error: this will panic at run-time if the environment variable doesn't exist at compile-time
+ --> $DIR/option_env_unwrap.rs:13:21
|
LL | let _ = inline!(option_env!($"PATH").unwrap());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -25,7 +33,7 @@ LL | let _ = inline!(option_env!($"PATH").unwrap());
= note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: this will panic at run-time if the environment variable doesn't exist at compile-time
- --> $DIR/option_env_unwrap.rs:13:21
+ --> $DIR/option_env_unwrap.rs:14:21
|
LL | let _ = inline!(option_env!($"PATH").expect($"environment variable PATH isn't set"));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -34,7 +42,7 @@ LL | let _ = inline!(option_env!($"PATH").expect($"environment variable PATH
= note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: this will panic at run-time if the environment variable doesn't exist at compile-time
- --> $DIR/option_env_unwrap.rs:14:13
+ --> $DIR/option_env_unwrap.rs:15:13
|
LL | let _ = external!(option_env!($"PATH").unwrap());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -43,7 +51,7 @@ LL | let _ = external!(option_env!($"PATH").unwrap());
= note: this error originates in the macro `external` (in Nightly builds, run with -Z macro-backtrace for more info)
error: this will panic at run-time if the environment variable doesn't exist at compile-time
- --> $DIR/option_env_unwrap.rs:15:13
+ --> $DIR/option_env_unwrap.rs:16:13
|
LL | let _ = external!(option_env!($"PATH").expect($"environment variable PATH isn't set"));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -51,5 +59,5 @@ LL | let _ = external!(option_env!($"PATH").expect($"environment variable PA
= help: consider using the `env!` macro instead
= note: this error originates in the macro `external` (in Nightly builds, run with -Z macro-backtrace for more info)
-error: aborting due to 6 previous errors
+error: aborting due to 7 previous errors
diff --git a/src/tools/clippy/tests/ui/option_if_let_else.fixed b/src/tools/clippy/tests/ui/option_if_let_else.fixed
index 8e59e4375..6fee3cce6 100644
--- a/src/tools/clippy/tests/ui/option_if_let_else.fixed
+++ b/src/tools/clippy/tests/ui/option_if_let_else.fixed
@@ -5,7 +5,8 @@
clippy::redundant_closure,
clippy::ref_option_ref,
clippy::equatable_if_let,
- clippy::let_unit_value
+ clippy::let_unit_value,
+ clippy::redundant_locals
)]
fn bad1(string: Option<&str>) -> (bool, &str) {
diff --git a/src/tools/clippy/tests/ui/option_if_let_else.rs b/src/tools/clippy/tests/ui/option_if_let_else.rs
index e72edf2a8..4b3cf948a 100644
--- a/src/tools/clippy/tests/ui/option_if_let_else.rs
+++ b/src/tools/clippy/tests/ui/option_if_let_else.rs
@@ -5,7 +5,8 @@
clippy::redundant_closure,
clippy::ref_option_ref,
clippy::equatable_if_let,
- clippy::let_unit_value
+ clippy::let_unit_value,
+ clippy::redundant_locals
)]
fn bad1(string: Option<&str>) -> (bool, &str) {
diff --git a/src/tools/clippy/tests/ui/option_if_let_else.stderr b/src/tools/clippy/tests/ui/option_if_let_else.stderr
index aa2da2174..350f0f07e 100644
--- a/src/tools/clippy/tests/ui/option_if_let_else.stderr
+++ b/src/tools/clippy/tests/ui/option_if_let_else.stderr
@@ -1,5 +1,5 @@
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:12:5
+ --> $DIR/option_if_let_else.rs:13:5
|
LL | / if let Some(x) = string {
LL | | (true, x)
@@ -11,19 +11,19 @@ LL | | }
= note: `-D clippy::option-if-let-else` implied by `-D warnings`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:30:13
+ --> $DIR/option_if_let_else.rs:31:13
|
LL | let _ = if let Some(s) = *string { s.len() } else { 0 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `string.map_or(0, |s| s.len())`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:31:13
+ --> $DIR/option_if_let_else.rs:32:13
|
LL | let _ = if let Some(s) = &num { s } else { &0 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `num.as_ref().map_or(&0, |s| s)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:32:13
+ --> $DIR/option_if_let_else.rs:33:13
|
LL | let _ = if let Some(s) = &mut num {
| _____________^
@@ -43,13 +43,13 @@ LL ~ });
|
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:38:13
+ --> $DIR/option_if_let_else.rs:39:13
|
LL | let _ = if let Some(ref s) = num { s } else { &0 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `num.as_ref().map_or(&0, |s| s)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:39:13
+ --> $DIR/option_if_let_else.rs:40:13
|
LL | let _ = if let Some(mut s) = num {
| _____________^
@@ -69,7 +69,7 @@ LL ~ });
|
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:45:13
+ --> $DIR/option_if_let_else.rs:46:13
|
LL | let _ = if let Some(ref mut s) = num {
| _____________^
@@ -89,7 +89,7 @@ LL ~ });
|
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:54:5
+ --> $DIR/option_if_let_else.rs:55:5
|
LL | / if let Some(x) = arg {
LL | | let y = x * x;
@@ -108,7 +108,7 @@ LL + })
|
error: use Option::map_or_else instead of an if let/else
- --> $DIR/option_if_let_else.rs:67:13
+ --> $DIR/option_if_let_else.rs:68:13
|
LL | let _ = if let Some(x) = arg {
| _____________^
@@ -120,7 +120,7 @@ LL | | };
| |_____^ help: try: `arg.map_or_else(|| side_effect(), |x| x)`
error: use Option::map_or_else instead of an if let/else
- --> $DIR/option_if_let_else.rs:76:13
+ --> $DIR/option_if_let_else.rs:77:13
|
LL | let _ = if let Some(x) = arg {
| _____________^
@@ -143,7 +143,7 @@ LL ~ }, |x| x * x * x * x);
|
error: use Option::map_or_else instead of an if let/else
- --> $DIR/option_if_let_else.rs:109:13
+ --> $DIR/option_if_let_else.rs:110:13
|
LL | / if let Some(idx) = s.find('.') {
LL | | vec![s[..idx].to_string(), s[idx..].to_string()]
@@ -153,7 +153,7 @@ LL | | }
| |_____________^ help: try: `s.find('.').map_or_else(|| vec![s.to_string()], |idx| vec![s[..idx].to_string(), s[idx..].to_string()])`
error: use Option::map_or_else instead of an if let/else
- --> $DIR/option_if_let_else.rs:120:5
+ --> $DIR/option_if_let_else.rs:121:5
|
LL | / if let Ok(binding) = variable {
LL | | println!("Ok {binding}");
@@ -172,13 +172,13 @@ LL + })
|
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:142:13
+ --> $DIR/option_if_let_else.rs:143:13
|
LL | let _ = if let Some(x) = optional { x + 2 } else { 5 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `optional.map_or(5, |x| x + 2)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:152:13
+ --> $DIR/option_if_let_else.rs:153:13
|
LL | let _ = if let Some(x) = Some(0) {
| _____________^
@@ -200,13 +200,13 @@ LL ~ });
|
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:180:13
+ --> $DIR/option_if_let_else.rs:181:13
|
LL | let _ = if let Some(x) = Some(0) { s.len() + x } else { s.len() };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Some(0).map_or(s.len(), |x| s.len() + x)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:184:13
+ --> $DIR/option_if_let_else.rs:185:13
|
LL | let _ = if let Some(x) = Some(0) {
| _____________^
@@ -226,7 +226,7 @@ LL ~ });
|
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:223:13
+ --> $DIR/option_if_let_else.rs:224:13
|
LL | let _ = match s {
| _____________^
@@ -236,7 +236,7 @@ LL | | };
| |_____^ help: try: `s.map_or(1, |string| string.len())`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:227:13
+ --> $DIR/option_if_let_else.rs:228:13
|
LL | let _ = match Some(10) {
| _____________^
@@ -246,7 +246,7 @@ LL | | };
| |_____^ help: try: `Some(10).map_or(5, |a| a + 1)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:233:13
+ --> $DIR/option_if_let_else.rs:234:13
|
LL | let _ = match res {
| _____________^
@@ -256,7 +256,7 @@ LL | | };
| |_____^ help: try: `res.map_or(1, |a| a + 1)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:237:13
+ --> $DIR/option_if_let_else.rs:238:13
|
LL | let _ = match res {
| _____________^
@@ -266,13 +266,13 @@ LL | | };
| |_____^ help: try: `res.map_or(1, |a| a + 1)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:241:13
+ --> $DIR/option_if_let_else.rs:242:13
|
LL | let _ = if let Ok(a) = res { a + 1 } else { 5 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `res.map_or(5, |a| a + 1)`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:258:9
+ --> $DIR/option_if_let_else.rs:259:9
|
LL | / match initial {
LL | | Some(value) => do_something(value),
@@ -281,7 +281,7 @@ LL | | }
| |_________^ help: try: `initial.as_ref().map_or({}, |value| do_something(value))`
error: use Option::map_or instead of an if let/else
- --> $DIR/option_if_let_else.rs:265:9
+ --> $DIR/option_if_let_else.rs:266:9
|
LL | / match initial {
LL | | Some(value) => do_something2(value),
diff --git a/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.stderr b/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.stderr
index 0305387b9..5be5f10b0 100644
--- a/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.stderr
+++ b/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.stderr
@@ -4,7 +4,7 @@ error: called `map(f)` on an `Option` value where `f` is a function that returns
LL | x.field.map(do_nothing);
| ^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(x_field) = x.field { do_nothing(x_field) }`
+ | help: try: `if let Some(x_field) = x.field { do_nothing(x_field) }`
|
= note: `-D clippy::option-map-unit-fn` implied by `-D warnings`
@@ -14,7 +14,7 @@ error: called `map(f)` on an `Option` value where `f` is a function that returns
LL | x.field.map(do_nothing);
| ^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(x_field) = x.field { do_nothing(x_field) }`
+ | help: try: `if let Some(x_field) = x.field { do_nothing(x_field) }`
error: called `map(f)` on an `Option` value where `f` is a function that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:42:5
@@ -22,7 +22,7 @@ error: called `map(f)` on an `Option` value where `f` is a function that returns
LL | x.field.map(diverge);
| ^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(x_field) = x.field { diverge(x_field) }`
+ | help: try: `if let Some(x_field) = x.field { diverge(x_field) }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:48:5
@@ -30,7 +30,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|value| x.do_option_nothing(value + captured));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = x.field { x.do_option_nothing(value + captured) }`
+ | help: try: `if let Some(value) = x.field { x.do_option_nothing(value + captured) }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:50:5
@@ -38,7 +38,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|value| { x.do_option_plus_one(value + captured); });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = x.field { x.do_option_plus_one(value + captured); }`
+ | help: try: `if let Some(value) = x.field { x.do_option_plus_one(value + captured); }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:53:5
@@ -46,7 +46,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|value| do_nothing(value + captured));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = x.field { do_nothing(value + captured) }`
+ | help: try: `if let Some(value) = x.field { do_nothing(value + captured) }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:55:5
@@ -54,7 +54,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|value| { do_nothing(value + captured) });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = x.field { do_nothing(value + captured) }`
+ | help: try: `if let Some(value) = x.field { do_nothing(value + captured) }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:57:5
@@ -62,7 +62,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|value| { do_nothing(value + captured); });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = x.field { do_nothing(value + captured); }`
+ | help: try: `if let Some(value) = x.field { do_nothing(value + captured); }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:59:5
@@ -70,7 +70,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|value| { { do_nothing(value + captured); } });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = x.field { do_nothing(value + captured); }`
+ | help: try: `if let Some(value) = x.field { do_nothing(value + captured); }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:62:5
@@ -78,7 +78,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|value| diverge(value + captured));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = x.field { diverge(value + captured) }`
+ | help: try: `if let Some(value) = x.field { diverge(value + captured) }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:64:5
@@ -86,7 +86,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|value| { diverge(value + captured) });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = x.field { diverge(value + captured) }`
+ | help: try: `if let Some(value) = x.field { diverge(value + captured) }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:66:5
@@ -94,7 +94,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|value| { diverge(value + captured); });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = x.field { diverge(value + captured); }`
+ | help: try: `if let Some(value) = x.field { diverge(value + captured); }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:68:5
@@ -102,7 +102,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|value| { { diverge(value + captured); } });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = x.field { diverge(value + captured); }`
+ | help: try: `if let Some(value) = x.field { diverge(value + captured); }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:73:5
@@ -110,7 +110,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|value| { let y = plus_one(value + captured); });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = x.field { let y = plus_one(value + captured); }`
+ | help: try: `if let Some(value) = x.field { let y = plus_one(value + captured); }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:75:5
@@ -118,7 +118,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|value| { plus_one(value + captured); });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = x.field { plus_one(value + captured); }`
+ | help: try: `if let Some(value) = x.field { plus_one(value + captured); }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:77:5
@@ -126,7 +126,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|value| { { plus_one(value + captured); } });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = x.field { plus_one(value + captured); }`
+ | help: try: `if let Some(value) = x.field { plus_one(value + captured); }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:80:5
@@ -134,7 +134,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | x.field.map(|ref value| { do_nothing(value + captured) });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(ref value) = x.field { do_nothing(value + captured) }`
+ | help: try: `if let Some(ref value) = x.field { do_nothing(value + captured) }`
error: called `map(f)` on an `Option` value where `f` is a function that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:82:5
@@ -142,7 +142,7 @@ error: called `map(f)` on an `Option` value where `f` is a function that returns
LL | option().map(do_nothing);
| ^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(a) = option() { do_nothing(a) }`
+ | help: try: `if let Some(a) = option() { do_nothing(a) }`
error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()`
--> $DIR/option_map_unit_fn_fixable.rs:84:5
@@ -150,7 +150,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns
LL | option().map(|value| println!("{:?}", value));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Some(value) = option() { println!("{:?}", value) }`
+ | help: try: `if let Some(value) = option() { println!("{:?}", value) }`
error: aborting due to 19 previous errors
diff --git a/src/tools/clippy/tests/ui/or_fun_call.fixed b/src/tools/clippy/tests/ui/or_fun_call.fixed
index 703debb7a..581f3ad45 100644
--- a/src/tools/clippy/tests/ui/or_fun_call.fixed
+++ b/src/tools/clippy/tests/ui/or_fun_call.fixed
@@ -9,8 +9,7 @@
clippy::useless_vec
)]
-use std::collections::BTreeMap;
-use std::collections::HashMap;
+use std::collections::{BTreeMap, HashMap};
use std::time::Duration;
/// Checks implementation of the `OR_FUN_CALL` lint.
@@ -191,7 +190,7 @@ mod issue8239 {
acc.push_str(&f);
acc
})
- .unwrap_or_default();
+ .unwrap_or(String::new());
}
fn more_to_max_suggestion_highest_lines_1() {
@@ -204,7 +203,7 @@ mod issue8239 {
acc.push_str(&f);
acc
})
- .unwrap_or_default();
+ .unwrap_or(String::new());
}
fn equal_to_max_suggestion_highest_lines() {
@@ -216,7 +215,7 @@ mod issue8239 {
acc.push_str(&f);
acc
})
- .unwrap_or_default();
+ .unwrap_or(String::new());
}
fn less_than_max_suggestion_highest_lines() {
@@ -227,7 +226,7 @@ mod issue8239 {
acc.push_str(&f);
acc
})
- .unwrap_or_default();
+ .unwrap_or(String::new());
}
}
@@ -258,4 +257,59 @@ mod issue8993 {
}
}
+mod lazy {
+ use super::*;
+
+ fn foo() {
+ struct Foo;
+
+ impl Foo {
+ fn new() -> Foo {
+ Foo
+ }
+ }
+
+ struct FakeDefault;
+ impl FakeDefault {
+ fn default() -> Self {
+ FakeDefault
+ }
+ }
+
+ impl Default for FakeDefault {
+ fn default() -> Self {
+ FakeDefault
+ }
+ }
+
+ let with_new = Some(vec![1]);
+ with_new.unwrap_or_default();
+
+ let with_default_trait = Some(1);
+ with_default_trait.unwrap_or_default();
+
+ let with_default_type = Some(1);
+ with_default_type.unwrap_or_default();
+
+ let real_default = None::<FakeDefault>;
+ real_default.unwrap_or_default();
+
+ let mut map = HashMap::<u64, String>::new();
+ map.entry(42).or_default();
+
+ let mut btree = BTreeMap::<u64, String>::new();
+ btree.entry(42).or_default();
+
+ let stringy = Some(String::new());
+ let _ = stringy.unwrap_or_default();
+
+ // negative tests
+ let self_default = None::<FakeDefault>;
+ self_default.unwrap_or_else(<FakeDefault>::default);
+
+ let without_default = Some(Foo);
+ without_default.unwrap_or_else(Foo::new);
+ }
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/or_fun_call.rs b/src/tools/clippy/tests/ui/or_fun_call.rs
index bb86fe0d4..1f3987eb8 100644
--- a/src/tools/clippy/tests/ui/or_fun_call.rs
+++ b/src/tools/clippy/tests/ui/or_fun_call.rs
@@ -9,8 +9,7 @@
clippy::useless_vec
)]
-use std::collections::BTreeMap;
-use std::collections::HashMap;
+use std::collections::{BTreeMap, HashMap};
use std::time::Duration;
/// Checks implementation of the `OR_FUN_CALL` lint.
@@ -258,4 +257,59 @@ mod issue8993 {
}
}
+mod lazy {
+ use super::*;
+
+ fn foo() {
+ struct Foo;
+
+ impl Foo {
+ fn new() -> Foo {
+ Foo
+ }
+ }
+
+ struct FakeDefault;
+ impl FakeDefault {
+ fn default() -> Self {
+ FakeDefault
+ }
+ }
+
+ impl Default for FakeDefault {
+ fn default() -> Self {
+ FakeDefault
+ }
+ }
+
+ let with_new = Some(vec![1]);
+ with_new.unwrap_or_else(Vec::new);
+
+ let with_default_trait = Some(1);
+ with_default_trait.unwrap_or_else(Default::default);
+
+ let with_default_type = Some(1);
+ with_default_type.unwrap_or_else(u64::default);
+
+ let real_default = None::<FakeDefault>;
+ real_default.unwrap_or_else(<FakeDefault as Default>::default);
+
+ let mut map = HashMap::<u64, String>::new();
+ map.entry(42).or_insert_with(String::new);
+
+ let mut btree = BTreeMap::<u64, String>::new();
+ btree.entry(42).or_insert_with(String::new);
+
+ let stringy = Some(String::new());
+ let _ = stringy.unwrap_or_else(String::new);
+
+ // negative tests
+ let self_default = None::<FakeDefault>;
+ self_default.unwrap_or_else(<FakeDefault>::default);
+
+ let without_default = Some(Foo);
+ without_default.unwrap_or_else(Foo::new);
+ }
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/or_fun_call.stderr b/src/tools/clippy/tests/ui/or_fun_call.stderr
index 0b5c686be..519f09165 100644
--- a/src/tools/clippy/tests/ui/or_fun_call.stderr
+++ b/src/tools/clippy/tests/ui/or_fun_call.stderr
@@ -1,172 +1,192 @@
error: use of `unwrap_or` followed by a function call
- --> $DIR/or_fun_call.rs:54:22
+ --> $DIR/or_fun_call.rs:53:22
|
LL | with_constructor.unwrap_or(make());
- | ^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(make)`
+ | ^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(make)`
|
= note: `-D clippy::or-fun-call` implied by `-D warnings`
-error: use of `unwrap_or` followed by a call to `new`
- --> $DIR/or_fun_call.rs:57:14
+error: use of `unwrap_or` to construct default value
+ --> $DIR/or_fun_call.rs:56:14
|
LL | with_new.unwrap_or(Vec::new());
- | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
+ |
+ = note: `-D clippy::unwrap-or-default` implied by `-D warnings`
error: use of `unwrap_or` followed by a function call
- --> $DIR/or_fun_call.rs:60:21
+ --> $DIR/or_fun_call.rs:59:21
|
LL | with_const_args.unwrap_or(Vec::with_capacity(12));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| Vec::with_capacity(12))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| Vec::with_capacity(12))`
error: use of `unwrap_or` followed by a function call
- --> $DIR/or_fun_call.rs:63:14
+ --> $DIR/or_fun_call.rs:62:14
|
LL | with_err.unwrap_or(make());
- | ^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|_| make())`
+ | ^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|_| make())`
error: use of `unwrap_or` followed by a function call
- --> $DIR/or_fun_call.rs:66:19
+ --> $DIR/or_fun_call.rs:65:19
|
LL | with_err_args.unwrap_or(Vec::with_capacity(12));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|_| Vec::with_capacity(12))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|_| Vec::with_capacity(12))`
-error: use of `unwrap_or` followed by a call to `default`
- --> $DIR/or_fun_call.rs:69:24
+error: use of `unwrap_or` to construct default value
+ --> $DIR/or_fun_call.rs:68:24
|
LL | with_default_trait.unwrap_or(Default::default());
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
-error: use of `unwrap_or` followed by a call to `default`
- --> $DIR/or_fun_call.rs:72:23
+error: use of `unwrap_or` to construct default value
+ --> $DIR/or_fun_call.rs:71:23
|
LL | with_default_type.unwrap_or(u64::default());
- | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
error: use of `unwrap_or` followed by a function call
- --> $DIR/or_fun_call.rs:75:18
+ --> $DIR/or_fun_call.rs:74:18
|
LL | self_default.unwrap_or(<FakeDefault>::default());
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(<FakeDefault>::default)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(<FakeDefault>::default)`
-error: use of `unwrap_or` followed by a call to `default`
- --> $DIR/or_fun_call.rs:78:18
+error: use of `unwrap_or` to construct default value
+ --> $DIR/or_fun_call.rs:77:18
|
LL | real_default.unwrap_or(<FakeDefault as Default>::default());
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
-error: use of `unwrap_or` followed by a call to `new`
- --> $DIR/or_fun_call.rs:81:14
+error: use of `unwrap_or` to construct default value
+ --> $DIR/or_fun_call.rs:80:14
|
LL | with_vec.unwrap_or(vec![]);
- | ^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+ | ^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
error: use of `unwrap_or` followed by a function call
- --> $DIR/or_fun_call.rs:84:21
+ --> $DIR/or_fun_call.rs:83:21
|
LL | without_default.unwrap_or(Foo::new());
- | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(Foo::new)`
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(Foo::new)`
-error: use of `or_insert` followed by a call to `new`
- --> $DIR/or_fun_call.rs:87:19
+error: use of `or_insert` to construct default value
+ --> $DIR/or_fun_call.rs:86:19
|
LL | map.entry(42).or_insert(String::new());
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `or_default()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()`
-error: use of `or_insert` followed by a call to `new`
- --> $DIR/or_fun_call.rs:90:23
+error: use of `or_insert` to construct default value
+ --> $DIR/or_fun_call.rs:89:23
|
LL | map_vec.entry(42).or_insert(vec![]);
- | ^^^^^^^^^^^^^^^^^ help: try this: `or_default()`
+ | ^^^^^^^^^^^^^^^^^ help: try: `or_default()`
-error: use of `or_insert` followed by a call to `new`
- --> $DIR/or_fun_call.rs:93:21
+error: use of `or_insert` to construct default value
+ --> $DIR/or_fun_call.rs:92:21
|
LL | btree.entry(42).or_insert(String::new());
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `or_default()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()`
-error: use of `or_insert` followed by a call to `new`
- --> $DIR/or_fun_call.rs:96:25
+error: use of `or_insert` to construct default value
+ --> $DIR/or_fun_call.rs:95:25
|
LL | btree_vec.entry(42).or_insert(vec![]);
- | ^^^^^^^^^^^^^^^^^ help: try this: `or_default()`
+ | ^^^^^^^^^^^^^^^^^ help: try: `or_default()`
-error: use of `unwrap_or` followed by a call to `new`
- --> $DIR/or_fun_call.rs:99:21
+error: use of `unwrap_or` to construct default value
+ --> $DIR/or_fun_call.rs:98:21
|
LL | let _ = stringy.unwrap_or(String::new());
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
error: use of `unwrap_or` followed by a function call
- --> $DIR/or_fun_call.rs:107:21
+ --> $DIR/or_fun_call.rs:106:21
|
LL | let _ = Some(1).unwrap_or(map[&1]);
- | ^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| map[&1])`
+ | ^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| map[&1])`
error: use of `unwrap_or` followed by a function call
- --> $DIR/or_fun_call.rs:109:21
+ --> $DIR/or_fun_call.rs:108:21
|
LL | let _ = Some(1).unwrap_or(map[&1]);
- | ^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| map[&1])`
+ | ^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| map[&1])`
error: use of `or` followed by a function call
- --> $DIR/or_fun_call.rs:133:35
+ --> $DIR/or_fun_call.rs:132:35
|
LL | let _ = Some("a".to_string()).or(Some("b".to_string()));
- | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `or_else(|| Some("b".to_string()))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_else(|| Some("b".to_string()))`
error: use of `unwrap_or` followed by a function call
- --> $DIR/or_fun_call.rs:172:14
+ --> $DIR/or_fun_call.rs:171:14
|
LL | None.unwrap_or(ptr_to_ref(s));
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| ptr_to_ref(s))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| ptr_to_ref(s))`
error: use of `unwrap_or` followed by a function call
- --> $DIR/or_fun_call.rs:178:14
+ --> $DIR/or_fun_call.rs:177:14
|
LL | None.unwrap_or(unsafe { ptr_to_ref(s) });
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| unsafe { ptr_to_ref(s) })`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| unsafe { ptr_to_ref(s) })`
error: use of `unwrap_or` followed by a function call
- --> $DIR/or_fun_call.rs:180:14
+ --> $DIR/or_fun_call.rs:179:14
|
LL | None.unwrap_or( unsafe { ptr_to_ref(s) } );
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| unsafe { ptr_to_ref(s) })`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| unsafe { ptr_to_ref(s) })`
-error: use of `unwrap_or` followed by a call to `new`
- --> $DIR/or_fun_call.rs:194:14
+error: use of `map_or` followed by a function call
+ --> $DIR/or_fun_call.rs:254:25
|
-LL | .unwrap_or(String::new());
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+LL | let _ = Some(4).map_or(g(), |v| v);
+ | ^^^^^^^^^^^^^^^^^^ help: try: `map_or_else(g, |v| v)`
-error: use of `unwrap_or` followed by a call to `new`
- --> $DIR/or_fun_call.rs:207:14
+error: use of `map_or` followed by a function call
+ --> $DIR/or_fun_call.rs:255:25
|
-LL | .unwrap_or(String::new());
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+LL | let _ = Some(4).map_or(g(), f);
+ | ^^^^^^^^^^^^^^ help: try: `map_or_else(g, f)`
-error: use of `unwrap_or` followed by a call to `new`
- --> $DIR/or_fun_call.rs:219:14
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/or_fun_call.rs:286:18
|
-LL | .unwrap_or(String::new());
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+LL | with_new.unwrap_or_else(Vec::new);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
-error: use of `unwrap_or` followed by a call to `new`
- --> $DIR/or_fun_call.rs:230:10
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/or_fun_call.rs:289:28
|
-LL | .unwrap_or(String::new());
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()`
+LL | with_default_trait.unwrap_or_else(Default::default);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
-error: use of `map_or` followed by a function call
- --> $DIR/or_fun_call.rs:255:25
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/or_fun_call.rs:292:27
|
-LL | let _ = Some(4).map_or(g(), |v| v);
- | ^^^^^^^^^^^^^^^^^^ help: try this: `map_or_else(g, |v| v)`
+LL | with_default_type.unwrap_or_else(u64::default);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
-error: use of `map_or` followed by a function call
- --> $DIR/or_fun_call.rs:256:25
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/or_fun_call.rs:295:22
|
-LL | let _ = Some(4).map_or(g(), f);
- | ^^^^^^^^^^^^^^ help: try this: `map_or_else(g, f)`
+LL | real_default.unwrap_or_else(<FakeDefault as Default>::default);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
+
+error: use of `or_insert_with` to construct default value
+ --> $DIR/or_fun_call.rs:298:23
+ |
+LL | map.entry(42).or_insert_with(String::new);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()`
+
+error: use of `or_insert_with` to construct default value
+ --> $DIR/or_fun_call.rs:301:25
+ |
+LL | btree.entry(42).or_insert_with(String::new);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()`
+
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/or_fun_call.rs:304:25
+ |
+LL | let _ = stringy.unwrap_or_else(String::new);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
-error: aborting due to 28 previous errors
+error: aborting due to 31 previous errors
diff --git a/src/tools/clippy/tests/ui/or_then_unwrap.stderr b/src/tools/clippy/tests/ui/or_then_unwrap.stderr
index da88154c5..2a1a52407 100644
--- a/src/tools/clippy/tests/ui/or_then_unwrap.stderr
+++ b/src/tools/clippy/tests/ui/or_then_unwrap.stderr
@@ -2,7 +2,7 @@ error: found `.or(Some(…)).unwrap()`
--> $DIR/or_then_unwrap.rs:24:20
|
LL | let _ = option.or(Some("fallback")).unwrap(); // should trigger lint
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or("fallback")`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or("fallback")`
|
= note: `-D clippy::or-then-unwrap` implied by `-D warnings`
@@ -10,13 +10,13 @@ error: found `.or(Ok(…)).unwrap()`
--> $DIR/or_then_unwrap.rs:27:20
|
LL | let _ = result.or::<&str>(Ok("fallback")).unwrap(); // should trigger lint
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or("fallback")`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or("fallback")`
error: found `.or(Some(…)).unwrap()`
--> $DIR/or_then_unwrap.rs:31:31
|
LL | let _ = option.map(|v| v).or(Some("fallback")).unwrap().to_string().chars(); // should trigger lint
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or("fallback")`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or("fallback")`
error: aborting due to 3 previous errors
diff --git a/src/tools/clippy/tests/ui/panic_in_result_fn.stderr b/src/tools/clippy/tests/ui/panic_in_result_fn.stderr
index 97787bc84..b758fc238 100644
--- a/src/tools/clippy/tests/ui/panic_in_result_fn.stderr
+++ b/src/tools/clippy/tests/ui/panic_in_result_fn.stderr
@@ -1,4 +1,4 @@
-error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+error: used `panic!()` or assertion in a function that returns `Result`
--> $DIR/panic_in_result_fn.rs:6:5
|
LL | / fn result_with_panic() -> Result<bool, String> // should emit lint
@@ -7,7 +7,7 @@ LL | | panic!("error");
LL | | }
| |_____^
|
- = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+ = help: `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
note: return Err() instead of panicking
--> $DIR/panic_in_result_fn.rs:8:9
|
@@ -15,55 +15,7 @@ LL | panic!("error");
| ^^^^^^^^^^^^^^^
= note: `-D clippy::panic-in-result-fn` implied by `-D warnings`
-error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
- --> $DIR/panic_in_result_fn.rs:11:5
- |
-LL | / fn result_with_unimplemented() -> Result<bool, String> // should emit lint
-LL | | {
-LL | | unimplemented!();
-LL | | }
- | |_____^
- |
- = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
-note: return Err() instead of panicking
- --> $DIR/panic_in_result_fn.rs:13:9
- |
-LL | unimplemented!();
- | ^^^^^^^^^^^^^^^^
-
-error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
- --> $DIR/panic_in_result_fn.rs:16:5
- |
-LL | / fn result_with_unreachable() -> Result<bool, String> // should emit lint
-LL | | {
-LL | | unreachable!();
-LL | | }
- | |_____^
- |
- = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
-note: return Err() instead of panicking
- --> $DIR/panic_in_result_fn.rs:18:9
- |
-LL | unreachable!();
- | ^^^^^^^^^^^^^^
-
-error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
- --> $DIR/panic_in_result_fn.rs:21:5
- |
-LL | / fn result_with_todo() -> Result<bool, String> // should emit lint
-LL | | {
-LL | | todo!("Finish this");
-LL | | }
- | |_____^
- |
- = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
-note: return Err() instead of panicking
- --> $DIR/panic_in_result_fn.rs:23:9
- |
-LL | todo!("Finish this");
- | ^^^^^^^^^^^^^^^^^^^^
-
-error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+error: used `panic!()` or assertion in a function that returns `Result`
--> $DIR/panic_in_result_fn.rs:52:1
|
LL | / fn function_result_with_panic() -> Result<bool, String> // should emit lint
@@ -72,28 +24,12 @@ LL | | panic!("error");
LL | | }
| |_^
|
- = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+ = help: `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
note: return Err() instead of panicking
--> $DIR/panic_in_result_fn.rs:54:5
|
LL | panic!("error");
| ^^^^^^^^^^^^^^^
-error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
- --> $DIR/panic_in_result_fn.rs:67:1
- |
-LL | / fn main() -> Result<(), String> {
-LL | | todo!("finish main method");
-LL | | Ok(())
-LL | | }
- | |_^
- |
- = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
-note: return Err() instead of panicking
- --> $DIR/panic_in_result_fn.rs:68:5
- |
-LL | todo!("finish main method");
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error: aborting due to 6 previous errors
+error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.stderr b/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.stderr
index eb0aacbb6..0dd213a7e 100644
--- a/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.stderr
+++ b/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.stderr
@@ -1,4 +1,4 @@
-error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+error: used `panic!()` or assertion in a function that returns `Result`
--> $DIR/panic_in_result_fn_assertions.rs:7:5
|
LL | / fn result_with_assert_with_message(x: i32) -> Result<bool, String> // should emit lint
@@ -8,7 +8,7 @@ LL | | Ok(true)
LL | | }
| |_____^
|
- = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+ = help: `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
note: return Err() instead of panicking
--> $DIR/panic_in_result_fn_assertions.rs:9:9
|
@@ -16,7 +16,7 @@ LL | assert!(x == 5, "wrong argument");
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: `-D clippy::panic-in-result-fn` implied by `-D warnings`
-error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+error: used `panic!()` or assertion in a function that returns `Result`
--> $DIR/panic_in_result_fn_assertions.rs:13:5
|
LL | / fn result_with_assert_eq(x: i32) -> Result<bool, String> // should emit lint
@@ -26,14 +26,14 @@ LL | | Ok(true)
LL | | }
| |_____^
|
- = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+ = help: `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
note: return Err() instead of panicking
--> $DIR/panic_in_result_fn_assertions.rs:15:9
|
LL | assert_eq!(x, 5);
| ^^^^^^^^^^^^^^^^
-error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`
+error: used `panic!()` or assertion in a function that returns `Result`
--> $DIR/panic_in_result_fn_assertions.rs:19:5
|
LL | / fn result_with_assert_ne(x: i32) -> Result<bool, String> // should emit lint
@@ -43,7 +43,7 @@ LL | | Ok(true)
LL | | }
| |_____^
|
- = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
+ = help: `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing
note: return Err() instead of panicking
--> $DIR/panic_in_result_fn_assertions.rs:21:9
|
diff --git a/src/tools/clippy/tests/ui/print_literal.stderr b/src/tools/clippy/tests/ui/print_literal.stderr
index 6404dacda..71c8d188f 100644
--- a/src/tools/clippy/tests/ui/print_literal.stderr
+++ b/src/tools/clippy/tests/ui/print_literal.stderr
@@ -5,7 +5,7 @@ LL | print!("Hello {}", "world");
| ^^^^^^^
|
= note: `-D clippy::print-literal` implied by `-D warnings`
-help: try this
+help: try
|
LL - print!("Hello {}", "world");
LL + print!("Hello world");
@@ -17,7 +17,7 @@ error: literal with an empty format string
LL | println!("Hello {} {}", world, "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - println!("Hello {} {}", world, "world");
LL + println!("Hello {} world", world);
@@ -29,7 +29,7 @@ error: literal with an empty format string
LL | println!("Hello {}", "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - println!("Hello {}", "world");
LL + println!("Hello world");
@@ -41,7 +41,7 @@ error: literal with an empty format string
LL | println!("{} {:.4}", "a literal", 5);
| ^^^^^^^^^^^
|
-help: try this
+help: try
|
LL - println!("{} {:.4}", "a literal", 5);
LL + println!("a literal {:.4}", 5);
@@ -53,7 +53,7 @@ error: literal with an empty format string
LL | println!("{0} {1}", "hello", "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - println!("{0} {1}", "hello", "world");
LL + println!("hello {1}", "world");
@@ -65,7 +65,7 @@ error: literal with an empty format string
LL | println!("{0} {1}", "hello", "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - println!("{0} {1}", "hello", "world");
LL + println!("{0} world", "hello");
@@ -77,7 +77,7 @@ error: literal with an empty format string
LL | println!("{1} {0}", "hello", "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - println!("{1} {0}", "hello", "world");
LL + println!("world {0}", "hello");
@@ -89,7 +89,7 @@ error: literal with an empty format string
LL | println!("{1} {0}", "hello", "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - println!("{1} {0}", "hello", "world");
LL + println!("{1} hello", "world");
@@ -101,7 +101,7 @@ error: literal with an empty format string
LL | println!("{foo} {bar}", foo = "hello", bar = "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - println!("{foo} {bar}", foo = "hello", bar = "world");
LL + println!("hello {bar}", bar = "world");
@@ -113,7 +113,7 @@ error: literal with an empty format string
LL | println!("{foo} {bar}", foo = "hello", bar = "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - println!("{foo} {bar}", foo = "hello", bar = "world");
LL + println!("{foo} world", foo = "hello");
@@ -125,7 +125,7 @@ error: literal with an empty format string
LL | println!("{bar} {foo}", foo = "hello", bar = "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - println!("{bar} {foo}", foo = "hello", bar = "world");
LL + println!("world {foo}", foo = "hello");
@@ -137,7 +137,7 @@ error: literal with an empty format string
LL | println!("{bar} {foo}", foo = "hello", bar = "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - println!("{bar} {foo}", foo = "hello", bar = "world");
LL + println!("{bar} hello", bar = "world");
diff --git a/src/tools/clippy/tests/ui/ptr_arg.rs b/src/tools/clippy/tests/ui/ptr_arg.rs
index 709f74ee6..08075c382 100644
--- a/src/tools/clippy/tests/ui/ptr_arg.rs
+++ b/src/tools/clippy/tests/ui/ptr_arg.rs
@@ -3,7 +3,8 @@
unused,
clippy::many_single_char_names,
clippy::needless_lifetimes,
- clippy::redundant_clone
+ clippy::redundant_clone,
+ clippy::needless_pass_by_ref_mut
)]
#![warn(clippy::ptr_arg)]
@@ -266,3 +267,16 @@ mod issue_9218 {
todo!()
}
}
+
+mod issue_11181 {
+ extern "C" fn allowed(_v: &Vec<u32>) {}
+
+ struct S;
+ impl S {
+ extern "C" fn allowed(_v: &Vec<u32>) {}
+ }
+
+ trait T {
+ extern "C" fn allowed(_v: &Vec<u32>) {}
+ }
+}
diff --git a/src/tools/clippy/tests/ui/ptr_arg.stderr b/src/tools/clippy/tests/ui/ptr_arg.stderr
index d663b070b..0e9dd760f 100644
--- a/src/tools/clippy/tests/ui/ptr_arg.stderr
+++ b/src/tools/clippy/tests/ui/ptr_arg.stderr
@@ -1,5 +1,5 @@
error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:13:14
+ --> $DIR/ptr_arg.rs:14:14
|
LL | fn do_vec(x: &Vec<i64>) {
| ^^^^^^^^^ help: change this to: `&[i64]`
@@ -7,43 +7,43 @@ LL | fn do_vec(x: &Vec<i64>) {
= note: `-D clippy::ptr-arg` implied by `-D warnings`
error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:17:18
+ --> $DIR/ptr_arg.rs:18:18
|
LL | fn do_vec_mut(x: &mut Vec<i64>) {
| ^^^^^^^^^^^^^ help: change this to: `&mut [i64]`
error: writing `&String` instead of `&str` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:21:14
+ --> $DIR/ptr_arg.rs:22:14
|
LL | fn do_str(x: &String) {
| ^^^^^^^ help: change this to: `&str`
error: writing `&mut String` instead of `&mut str` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:25:18
+ --> $DIR/ptr_arg.rs:26:18
|
LL | fn do_str_mut(x: &mut String) {
| ^^^^^^^^^^^ help: change this to: `&mut str`
error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:29:15
+ --> $DIR/ptr_arg.rs:30:15
|
LL | fn do_path(x: &PathBuf) {
| ^^^^^^^^ help: change this to: `&Path`
error: writing `&mut PathBuf` instead of `&mut Path` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:33:19
+ --> $DIR/ptr_arg.rs:34:19
|
LL | fn do_path_mut(x: &mut PathBuf) {
| ^^^^^^^^^^^^ help: change this to: `&mut Path`
error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:41:18
+ --> $DIR/ptr_arg.rs:42:18
|
LL | fn do_vec(x: &Vec<i64>);
| ^^^^^^^^^ help: change this to: `&[i64]`
error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:54:14
+ --> $DIR/ptr_arg.rs:55:14
|
LL | fn cloned(x: &Vec<u8>) -> Vec<u8> {
| ^^^^^^^^
@@ -60,7 +60,7 @@ LL ~ x.to_owned()
|
error: writing `&String` instead of `&str` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:63:18
+ --> $DIR/ptr_arg.rs:64:18
|
LL | fn str_cloned(x: &String) -> String {
| ^^^^^^^
@@ -76,7 +76,7 @@ LL ~ x.to_owned()
|
error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:71:19
+ --> $DIR/ptr_arg.rs:72:19
|
LL | fn path_cloned(x: &PathBuf) -> PathBuf {
| ^^^^^^^^
@@ -92,7 +92,7 @@ LL ~ x.to_path_buf()
|
error: writing `&String` instead of `&str` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:79:44
+ --> $DIR/ptr_arg.rs:80:44
|
LL | fn false_positive_capacity(x: &Vec<u8>, y: &String) {
| ^^^^^^^
@@ -106,19 +106,19 @@ LL ~ let c = y;
|
error: using a reference to `Cow` is not recommended
- --> $DIR/ptr_arg.rs:93:25
+ --> $DIR/ptr_arg.rs:94:25
|
LL | fn test_cow_with_ref(c: &Cow<[i32]>) {}
| ^^^^^^^^^^^ help: change this to: `&[i32]`
error: writing `&String` instead of `&str` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:122:66
+ --> $DIR/ptr_arg.rs:123:66
|
LL | fn some_allowed(#[allow(clippy::ptr_arg)] _v: &Vec<u32>, _s: &String) {}
| ^^^^^^^ help: change this to: `&str`
error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:151:21
+ --> $DIR/ptr_arg.rs:152:21
|
LL | fn foo_vec(vec: &Vec<u8>) {
| ^^^^^^^^
@@ -131,7 +131,7 @@ LL ~ let _ = vec.to_owned().clone();
|
error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:156:23
+ --> $DIR/ptr_arg.rs:157:23
|
LL | fn foo_path(path: &PathBuf) {
| ^^^^^^^^
@@ -144,7 +144,7 @@ LL ~ let _ = path.to_path_buf().clone();
|
error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:161:21
+ --> $DIR/ptr_arg.rs:162:21
|
LL | fn foo_str(str: &PathBuf) {
| ^^^^^^^^
@@ -157,43 +157,43 @@ LL ~ let _ = str.to_path_buf().clone();
|
error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:167:29
+ --> $DIR/ptr_arg.rs:168:29
|
LL | fn mut_vec_slice_methods(v: &mut Vec<u32>) {
| ^^^^^^^^^^^^^ help: change this to: `&mut [u32]`
error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:229:17
+ --> $DIR/ptr_arg.rs:230:17
|
LL | fn dyn_trait(a: &mut Vec<u32>, b: &mut String, c: &mut PathBuf) {
| ^^^^^^^^^^^^^ help: change this to: `&mut [u32]`
error: writing `&mut String` instead of `&mut str` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:229:35
+ --> $DIR/ptr_arg.rs:230:35
|
LL | fn dyn_trait(a: &mut Vec<u32>, b: &mut String, c: &mut PathBuf) {
| ^^^^^^^^^^^ help: change this to: `&mut str`
error: writing `&mut PathBuf` instead of `&mut Path` involves a new object where a slice will do
- --> $DIR/ptr_arg.rs:229:51
+ --> $DIR/ptr_arg.rs:230:51
|
LL | fn dyn_trait(a: &mut Vec<u32>, b: &mut String, c: &mut PathBuf) {
| ^^^^^^^^^^^^ help: change this to: `&mut Path`
error: using a reference to `Cow` is not recommended
- --> $DIR/ptr_arg.rs:252:39
+ --> $DIR/ptr_arg.rs:253:39
|
LL | fn cow_elided_lifetime<'a>(input: &'a Cow<str>) -> &'a str {
| ^^^^^^^^^^^^ help: change this to: `&str`
error: using a reference to `Cow` is not recommended
- --> $DIR/ptr_arg.rs:257:36
+ --> $DIR/ptr_arg.rs:258:36
|
LL | fn cow_bad_ret_ty_1<'a>(input: &'a Cow<'a, str>) -> &'static str {
| ^^^^^^^^^^^^^^^^ help: change this to: `&str`
error: using a reference to `Cow` is not recommended
- --> $DIR/ptr_arg.rs:260:40
+ --> $DIR/ptr_arg.rs:261:40
|
LL | fn cow_bad_ret_ty_2<'a, 'b>(input: &'a Cow<'a, str>) -> &'b str {
| ^^^^^^^^^^^^^^^^ help: change this to: `&str`
diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.fixed b/src/tools/clippy/tests/ui/ptr_as_ptr.fixed
index 26a64c861..84babb974 100644
--- a/src/tools/clippy/tests/ui/ptr_as_ptr.fixed
+++ b/src/tools/clippy/tests/ui/ptr_as_ptr.fixed
@@ -3,8 +3,22 @@
#![warn(clippy::ptr_as_ptr)]
+#[macro_use]
extern crate proc_macros;
-use proc_macros::{external, inline_macros};
+
+mod issue_11278_a {
+ #[derive(Debug)]
+ pub struct T<D: std::fmt::Debug + ?Sized> {
+ pub p: D,
+ }
+}
+
+mod issue_11278_b {
+ pub fn f(o: &mut super::issue_11278_a::T<dyn std::fmt::Debug>) -> super::issue_11278_a::T<String> {
+ // Retain `super`
+ *unsafe { Box::from_raw(Box::into_raw(Box::new(o)).cast::<super::issue_11278_a::T<String>>()) }
+ }
+}
#[inline_macros]
fn main() {
diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.rs b/src/tools/clippy/tests/ui/ptr_as_ptr.rs
index ea40d4947..34fd76428 100644
--- a/src/tools/clippy/tests/ui/ptr_as_ptr.rs
+++ b/src/tools/clippy/tests/ui/ptr_as_ptr.rs
@@ -3,8 +3,22 @@
#![warn(clippy::ptr_as_ptr)]
+#[macro_use]
extern crate proc_macros;
-use proc_macros::{external, inline_macros};
+
+mod issue_11278_a {
+ #[derive(Debug)]
+ pub struct T<D: std::fmt::Debug + ?Sized> {
+ pub p: D,
+ }
+}
+
+mod issue_11278_b {
+ pub fn f(o: &mut super::issue_11278_a::T<dyn std::fmt::Debug>) -> super::issue_11278_a::T<String> {
+ // Retain `super`
+ *unsafe { Box::from_raw(Box::into_raw(Box::new(o)) as *mut super::issue_11278_a::T<String>) }
+ }
+}
#[inline_macros]
fn main() {
diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.stderr b/src/tools/clippy/tests/ui/ptr_as_ptr.stderr
index 78d733994..e64f33515 100644
--- a/src/tools/clippy/tests/ui/ptr_as_ptr.stderr
+++ b/src/tools/clippy/tests/ui/ptr_as_ptr.stderr
@@ -1,37 +1,43 @@
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:14:13
+ --> $DIR/ptr_as_ptr.rs:19:33
|
-LL | let _ = ptr as *const i32;
- | ^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `ptr.cast::<i32>()`
+LL | *unsafe { Box::from_raw(Box::into_raw(Box::new(o)) as *mut super::issue_11278_a::T<String>) }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `Box::into_raw(Box::new(o)).cast::<super::issue_11278_a::T<String>>()`
|
= note: `-D clippy::ptr-as-ptr` implied by `-D warnings`
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:15:13
+ --> $DIR/ptr_as_ptr.rs:28:13
+ |
+LL | let _ = ptr as *const i32;
+ | ^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `ptr.cast::<i32>()`
+
+error: `as` casting between raw pointers without changing its mutability
+ --> $DIR/ptr_as_ptr.rs:29:13
|
LL | let _ = mut_ptr as *mut i32;
| ^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `mut_ptr.cast::<i32>()`
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:20:17
+ --> $DIR/ptr_as_ptr.rs:34:17
|
LL | let _ = *ptr_ptr as *const i32;
| ^^^^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `(*ptr_ptr).cast::<i32>()`
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:33:25
+ --> $DIR/ptr_as_ptr.rs:47:25
|
LL | let _: *const i32 = ptr as *const _;
| ^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `ptr.cast()`
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:34:23
+ --> $DIR/ptr_as_ptr.rs:48:23
|
LL | let _: *mut i32 = mut_ptr as _;
| ^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `mut_ptr.cast()`
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:37:21
+ --> $DIR/ptr_as_ptr.rs:51:21
|
LL | let _ = inline!($ptr as *const i32);
| ^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `$ptr.cast::<i32>()`
@@ -39,16 +45,16 @@ LL | let _ = inline!($ptr as *const i32);
= note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info)
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:58:13
+ --> $DIR/ptr_as_ptr.rs:72:13
|
LL | let _ = ptr as *const i32;
| ^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `ptr.cast::<i32>()`
error: `as` casting between raw pointers without changing its mutability
- --> $DIR/ptr_as_ptr.rs:59:13
+ --> $DIR/ptr_as_ptr.rs:73:13
|
LL | let _ = mut_ptr as *mut i32;
| ^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast`, a safer alternative: `mut_ptr.cast::<i32>()`
-error: aborting due to 8 previous errors
+error: aborting due to 9 previous errors
diff --git a/src/tools/clippy/tests/ui/question_mark.fixed b/src/tools/clippy/tests/ui/question_mark.fixed
index 2d8920ccc..20b9e42a7 100644
--- a/src/tools/clippy/tests/ui/question_mark.fixed
+++ b/src/tools/clippy/tests/ui/question_mark.fixed
@@ -138,6 +138,23 @@ fn result_func(x: Result<i32, i32>) -> Result<i32, i32> {
// no warning
let _ = if let Err(e) = x { Err(e) } else { Ok(0) };
+ // issue #11283
+ // no warning
+ #[warn(clippy::question_mark_used)]
+ {
+ if let Err(err) = Ok(()) {
+ return Err(err);
+ }
+
+ if Err::<i32, _>(0).is_err() {
+ return Err(0);
+ } else {
+ return Ok(0);
+ }
+
+ unreachable!()
+ }
+
Ok(y)
}
diff --git a/src/tools/clippy/tests/ui/question_mark.rs b/src/tools/clippy/tests/ui/question_mark.rs
index 69451c17e..8bdafd46e 100644
--- a/src/tools/clippy/tests/ui/question_mark.rs
+++ b/src/tools/clippy/tests/ui/question_mark.rs
@@ -170,6 +170,23 @@ fn result_func(x: Result<i32, i32>) -> Result<i32, i32> {
// no warning
let _ = if let Err(e) = x { Err(e) } else { Ok(0) };
+ // issue #11283
+ // no warning
+ #[warn(clippy::question_mark_used)]
+ {
+ if let Err(err) = Ok(()) {
+ return Err(err);
+ }
+
+ if Err::<i32, _>(0).is_err() {
+ return Err(0);
+ } else {
+ return Ok(0);
+ }
+
+ unreachable!()
+ }
+
Ok(y)
}
diff --git a/src/tools/clippy/tests/ui/question_mark.stderr b/src/tools/clippy/tests/ui/question_mark.stderr
index 2cfd75863..62489c8c8 100644
--- a/src/tools/clippy/tests/ui/question_mark.stderr
+++ b/src/tools/clippy/tests/ui/question_mark.stderr
@@ -115,7 +115,7 @@ LL | | }
| |_____^ help: replace it with: `x?;`
error: this block may be rewritten with the `?` operator
- --> $DIR/question_mark.rs:197:5
+ --> $DIR/question_mark.rs:214:5
|
LL | / if let Err(err) = func_returning_result() {
LL | | return Err(err);
@@ -123,7 +123,7 @@ LL | | }
| |_____^ help: replace it with: `func_returning_result()?;`
error: this block may be rewritten with the `?` operator
- --> $DIR/question_mark.rs:204:5
+ --> $DIR/question_mark.rs:221:5
|
LL | / if let Err(err) = func_returning_result() {
LL | | return Err(err);
@@ -131,7 +131,7 @@ LL | | }
| |_____^ help: replace it with: `func_returning_result()?;`
error: this block may be rewritten with the `?` operator
- --> $DIR/question_mark.rs:281:13
+ --> $DIR/question_mark.rs:298:13
|
LL | / if a.is_none() {
LL | | return None;
diff --git a/src/tools/clippy/tests/ui/range_contains.fixed b/src/tools/clippy/tests/ui/range_contains.fixed
index 0a92ee7c8..47c524811 100644
--- a/src/tools/clippy/tests/ui/range_contains.fixed
+++ b/src/tools/clippy/tests/ui/range_contains.fixed
@@ -5,6 +5,8 @@
#![allow(clippy::no_effect)]
#![allow(clippy::short_circuit_statement)]
#![allow(clippy::unnecessary_operation)]
+#![allow(clippy::impossible_comparisons)]
+#![allow(clippy::redundant_comparisons)]
fn main() {
let x = 9_i32;
diff --git a/src/tools/clippy/tests/ui/range_contains.rs b/src/tools/clippy/tests/ui/range_contains.rs
index 7a83be609..a35315a64 100644
--- a/src/tools/clippy/tests/ui/range_contains.rs
+++ b/src/tools/clippy/tests/ui/range_contains.rs
@@ -5,6 +5,8 @@
#![allow(clippy::no_effect)]
#![allow(clippy::short_circuit_statement)]
#![allow(clippy::unnecessary_operation)]
+#![allow(clippy::impossible_comparisons)]
+#![allow(clippy::redundant_comparisons)]
fn main() {
let x = 9_i32;
diff --git a/src/tools/clippy/tests/ui/range_contains.stderr b/src/tools/clippy/tests/ui/range_contains.stderr
index ea34023a4..1265db695 100644
--- a/src/tools/clippy/tests/ui/range_contains.stderr
+++ b/src/tools/clippy/tests/ui/range_contains.stderr
@@ -1,5 +1,5 @@
error: manual `Range::contains` implementation
- --> $DIR/range_contains.rs:13:5
+ --> $DIR/range_contains.rs:15:5
|
LL | x >= 8 && x < 12;
| ^^^^^^^^^^^^^^^^ help: use: `(8..12).contains(&x)`
@@ -7,121 +7,121 @@ LL | x >= 8 && x < 12;
= note: `-D clippy::manual-range-contains` implied by `-D warnings`
error: manual `Range::contains` implementation
- --> $DIR/range_contains.rs:14:5
+ --> $DIR/range_contains.rs:16:5
|
LL | x < 42 && x >= 21;
| ^^^^^^^^^^^^^^^^^ help: use: `(21..42).contains(&x)`
error: manual `Range::contains` implementation
- --> $DIR/range_contains.rs:15:5
+ --> $DIR/range_contains.rs:17:5
|
LL | 100 > x && 1 <= x;
| ^^^^^^^^^^^^^^^^^ help: use: `(1..100).contains(&x)`
error: manual `RangeInclusive::contains` implementation
- --> $DIR/range_contains.rs:18:5
+ --> $DIR/range_contains.rs:20:5
|
LL | x >= 9 && x <= 99;
| ^^^^^^^^^^^^^^^^^ help: use: `(9..=99).contains(&x)`
error: manual `RangeInclusive::contains` implementation
- --> $DIR/range_contains.rs:19:5
+ --> $DIR/range_contains.rs:21:5
|
LL | x <= 33 && x >= 1;
| ^^^^^^^^^^^^^^^^^ help: use: `(1..=33).contains(&x)`
error: manual `RangeInclusive::contains` implementation
- --> $DIR/range_contains.rs:20:5
+ --> $DIR/range_contains.rs:22:5
|
LL | 999 >= x && 1 <= x;
| ^^^^^^^^^^^^^^^^^^ help: use: `(1..=999).contains(&x)`
error: manual `!Range::contains` implementation
- --> $DIR/range_contains.rs:23:5
+ --> $DIR/range_contains.rs:25:5
|
LL | x < 8 || x >= 12;
| ^^^^^^^^^^^^^^^^ help: use: `!(8..12).contains(&x)`
error: manual `!Range::contains` implementation
- --> $DIR/range_contains.rs:24:5
+ --> $DIR/range_contains.rs:26:5
|
LL | x >= 42 || x < 21;
| ^^^^^^^^^^^^^^^^^ help: use: `!(21..42).contains(&x)`
error: manual `!Range::contains` implementation
- --> $DIR/range_contains.rs:25:5
+ --> $DIR/range_contains.rs:27:5
|
LL | 100 <= x || 1 > x;
| ^^^^^^^^^^^^^^^^^ help: use: `!(1..100).contains(&x)`
error: manual `!RangeInclusive::contains` implementation
- --> $DIR/range_contains.rs:28:5
+ --> $DIR/range_contains.rs:30:5
|
LL | x < 9 || x > 99;
| ^^^^^^^^^^^^^^^ help: use: `!(9..=99).contains(&x)`
error: manual `!RangeInclusive::contains` implementation
- --> $DIR/range_contains.rs:29:5
+ --> $DIR/range_contains.rs:31:5
|
LL | x > 33 || x < 1;
| ^^^^^^^^^^^^^^^ help: use: `!(1..=33).contains(&x)`
error: manual `!RangeInclusive::contains` implementation
- --> $DIR/range_contains.rs:30:5
+ --> $DIR/range_contains.rs:32:5
|
LL | 999 < x || 1 > x;
| ^^^^^^^^^^^^^^^^ help: use: `!(1..=999).contains(&x)`
error: manual `Range::contains` implementation
- --> $DIR/range_contains.rs:45:5
+ --> $DIR/range_contains.rs:47:5
|
LL | y >= 0. && y < 1.;
| ^^^^^^^^^^^^^^^^^ help: use: `(0. ..1.).contains(&y)`
error: manual `!RangeInclusive::contains` implementation
- --> $DIR/range_contains.rs:46:5
+ --> $DIR/range_contains.rs:48:5
|
LL | y < 0. || y > 1.;
| ^^^^^^^^^^^^^^^^ help: use: `!(0. ..=1.).contains(&y)`
error: manual `RangeInclusive::contains` implementation
- --> $DIR/range_contains.rs:49:5
+ --> $DIR/range_contains.rs:51:5
|
LL | x >= -10 && x <= 10;
| ^^^^^^^^^^^^^^^^^^^ help: use: `(-10..=10).contains(&x)`
error: manual `RangeInclusive::contains` implementation
- --> $DIR/range_contains.rs:51:5
+ --> $DIR/range_contains.rs:53:5
|
LL | y >= -3. && y <= 3.;
| ^^^^^^^^^^^^^^^^^^^ help: use: `(-3. ..=3.).contains(&y)`
error: manual `RangeInclusive::contains` implementation
- --> $DIR/range_contains.rs:56:30
+ --> $DIR/range_contains.rs:58:30
|
LL | (x >= 0) && (x <= 10) && (z >= 0) && (z <= 10);
| ^^^^^^^^^^^^^^^^^^^^^ help: use: `(0..=10).contains(&z)`
error: manual `RangeInclusive::contains` implementation
- --> $DIR/range_contains.rs:56:5
+ --> $DIR/range_contains.rs:58:5
|
LL | (x >= 0) && (x <= 10) && (z >= 0) && (z <= 10);
| ^^^^^^^^^^^^^^^^^^^^^ help: use: `(0..=10).contains(&x)`
error: manual `!Range::contains` implementation
- --> $DIR/range_contains.rs:57:29
+ --> $DIR/range_contains.rs:59:29
|
LL | (x < 0) || (x >= 10) || (z < 0) || (z >= 10);
| ^^^^^^^^^^^^^^^^^^^^ help: use: `!(0..10).contains(&z)`
error: manual `!Range::contains` implementation
- --> $DIR/range_contains.rs:57:5
+ --> $DIR/range_contains.rs:59:5
|
LL | (x < 0) || (x >= 10) || (z < 0) || (z >= 10);
| ^^^^^^^^^^^^^^^^^^^^ help: use: `!(0..10).contains(&x)`
error: manual `Range::contains` implementation
- --> $DIR/range_contains.rs:76:5
+ --> $DIR/range_contains.rs:78:5
|
LL | x >= 8 && x < 35;
| ^^^^^^^^^^^^^^^^ help: use: `(8..35).contains(&x)`
diff --git a/src/tools/clippy/tests/ui/read_line_without_trim.fixed b/src/tools/clippy/tests/ui/read_line_without_trim.fixed
new file mode 100644
index 000000000..cb6aab84e
--- /dev/null
+++ b/src/tools/clippy/tests/ui/read_line_without_trim.fixed
@@ -0,0 +1,36 @@
+//@run-rustfix
+
+#![allow(unused)]
+#![warn(clippy::read_line_without_trim)]
+
+fn main() {
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ input.pop();
+ let _x: i32 = input.parse().unwrap(); // don't trigger here, newline character is popped
+
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ let _x: i32 = input.trim_end().parse().unwrap();
+
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ let _x = input.trim_end().parse::<i32>().unwrap();
+
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ let _x = input.trim_end().parse::<u32>().unwrap();
+
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ let _x = input.trim_end().parse::<f32>().unwrap();
+
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ let _x = input.trim_end().parse::<bool>().unwrap();
+
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ // this is actually ok, so don't lint here
+ let _x = input.parse::<String>().unwrap();
+}
diff --git a/src/tools/clippy/tests/ui/read_line_without_trim.rs b/src/tools/clippy/tests/ui/read_line_without_trim.rs
new file mode 100644
index 000000000..bdc409a70
--- /dev/null
+++ b/src/tools/clippy/tests/ui/read_line_without_trim.rs
@@ -0,0 +1,36 @@
+//@run-rustfix
+
+#![allow(unused)]
+#![warn(clippy::read_line_without_trim)]
+
+fn main() {
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ input.pop();
+ let _x: i32 = input.parse().unwrap(); // don't trigger here, newline character is popped
+
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ let _x: i32 = input.parse().unwrap();
+
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ let _x = input.parse::<i32>().unwrap();
+
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ let _x = input.parse::<u32>().unwrap();
+
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ let _x = input.parse::<f32>().unwrap();
+
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ let _x = input.parse::<bool>().unwrap();
+
+ let mut input = String::new();
+ std::io::stdin().read_line(&mut input).unwrap();
+ // this is actually ok, so don't lint here
+ let _x = input.parse::<String>().unwrap();
+}
diff --git a/src/tools/clippy/tests/ui/read_line_without_trim.stderr b/src/tools/clippy/tests/ui/read_line_without_trim.stderr
new file mode 100644
index 000000000..f3d7b6042
--- /dev/null
+++ b/src/tools/clippy/tests/ui/read_line_without_trim.stderr
@@ -0,0 +1,73 @@
+error: calling `.parse()` without trimming the trailing newline character
+ --> $DIR/read_line_without_trim.rs:14:25
+ |
+LL | let _x: i32 = input.parse().unwrap();
+ | ----- ^^^^^^^
+ | |
+ | help: try: `input.trim_end()`
+ |
+note: call to `.read_line()` here, which leaves a trailing newline character in the buffer, which in turn will cause `.parse()` to fail
+ --> $DIR/read_line_without_trim.rs:13:5
+ |
+LL | std::io::stdin().read_line(&mut input).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = note: `-D clippy::read-line-without-trim` implied by `-D warnings`
+
+error: calling `.parse()` without trimming the trailing newline character
+ --> $DIR/read_line_without_trim.rs:18:20
+ |
+LL | let _x = input.parse::<i32>().unwrap();
+ | ----- ^^^^^^^^^^^^^^
+ | |
+ | help: try: `input.trim_end()`
+ |
+note: call to `.read_line()` here, which leaves a trailing newline character in the buffer, which in turn will cause `.parse()` to fail
+ --> $DIR/read_line_without_trim.rs:17:5
+ |
+LL | std::io::stdin().read_line(&mut input).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: calling `.parse()` without trimming the trailing newline character
+ --> $DIR/read_line_without_trim.rs:22:20
+ |
+LL | let _x = input.parse::<u32>().unwrap();
+ | ----- ^^^^^^^^^^^^^^
+ | |
+ | help: try: `input.trim_end()`
+ |
+note: call to `.read_line()` here, which leaves a trailing newline character in the buffer, which in turn will cause `.parse()` to fail
+ --> $DIR/read_line_without_trim.rs:21:5
+ |
+LL | std::io::stdin().read_line(&mut input).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: calling `.parse()` without trimming the trailing newline character
+ --> $DIR/read_line_without_trim.rs:26:20
+ |
+LL | let _x = input.parse::<f32>().unwrap();
+ | ----- ^^^^^^^^^^^^^^
+ | |
+ | help: try: `input.trim_end()`
+ |
+note: call to `.read_line()` here, which leaves a trailing newline character in the buffer, which in turn will cause `.parse()` to fail
+ --> $DIR/read_line_without_trim.rs:25:5
+ |
+LL | std::io::stdin().read_line(&mut input).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: calling `.parse()` without trimming the trailing newline character
+ --> $DIR/read_line_without_trim.rs:30:20
+ |
+LL | let _x = input.parse::<bool>().unwrap();
+ | ----- ^^^^^^^^^^^^^^^
+ | |
+ | help: try: `input.trim_end()`
+ |
+note: call to `.read_line()` here, which leaves a trailing newline character in the buffer, which in turn will cause `.parse()` to fail
+ --> $DIR/read_line_without_trim.rs:29:5
+ |
+LL | std::io::stdin().read_line(&mut input).unwrap();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/read_zero_byte_vec.rs b/src/tools/clippy/tests/ui/read_zero_byte_vec.rs
index 30807e0f8..ff2ad8644 100644
--- a/src/tools/clippy/tests/ui/read_zero_byte_vec.rs
+++ b/src/tools/clippy/tests/ui/read_zero_byte_vec.rs
@@ -1,5 +1,9 @@
#![warn(clippy::read_zero_byte_vec)]
-#![allow(clippy::unused_io_amount)]
+#![allow(
+ clippy::unused_io_amount,
+ clippy::needless_pass_by_ref_mut,
+ clippy::slow_vector_initialization
+)]
use std::fs::File;
use std::io;
use std::io::prelude::*;
diff --git a/src/tools/clippy/tests/ui/read_zero_byte_vec.stderr b/src/tools/clippy/tests/ui/read_zero_byte_vec.stderr
index 08ba9753d..4c7f605f4 100644
--- a/src/tools/clippy/tests/ui/read_zero_byte_vec.stderr
+++ b/src/tools/clippy/tests/ui/read_zero_byte_vec.stderr
@@ -1,5 +1,5 @@
error: reading zero byte data to `Vec`
- --> $DIR/read_zero_byte_vec.rs:17:5
+ --> $DIR/read_zero_byte_vec.rs:21:5
|
LL | f.read_exact(&mut data).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `data.resize(20, 0); f.read_exact(&mut data).unwrap();`
@@ -7,55 +7,55 @@ LL | f.read_exact(&mut data).unwrap();
= note: `-D clippy::read-zero-byte-vec` implied by `-D warnings`
error: reading zero byte data to `Vec`
- --> $DIR/read_zero_byte_vec.rs:21:5
+ --> $DIR/read_zero_byte_vec.rs:25:5
|
LL | f.read_exact(&mut data2)?;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `data2.resize(cap, 0); f.read_exact(&mut data2)?;`
error: reading zero byte data to `Vec`
- --> $DIR/read_zero_byte_vec.rs:25:5
+ --> $DIR/read_zero_byte_vec.rs:29:5
|
LL | f.read_exact(&mut data3)?;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
error: reading zero byte data to `Vec`
- --> $DIR/read_zero_byte_vec.rs:29:5
+ --> $DIR/read_zero_byte_vec.rs:33:5
|
LL | let _ = f.read(&mut data4)?;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: reading zero byte data to `Vec`
- --> $DIR/read_zero_byte_vec.rs:34:9
+ --> $DIR/read_zero_byte_vec.rs:38:9
|
LL | f.read(&mut data5)
| ^^^^^^^^^^^^^^^^^^
error: reading zero byte data to `Vec`
- --> $DIR/read_zero_byte_vec.rs:40:9
+ --> $DIR/read_zero_byte_vec.rs:44:9
|
LL | f.read(&mut data6)
| ^^^^^^^^^^^^^^^^^^
error: reading zero byte data to `Vec`
- --> $DIR/read_zero_byte_vec.rs:70:5
+ --> $DIR/read_zero_byte_vec.rs:74:5
|
LL | r.read(&mut data).await.unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: reading zero byte data to `Vec`
- --> $DIR/read_zero_byte_vec.rs:74:5
+ --> $DIR/read_zero_byte_vec.rs:78:5
|
LL | r.read_exact(&mut data2).await.unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: reading zero byte data to `Vec`
- --> $DIR/read_zero_byte_vec.rs:80:5
+ --> $DIR/read_zero_byte_vec.rs:84:5
|
LL | r.read(&mut data).await.unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: reading zero byte data to `Vec`
- --> $DIR/read_zero_byte_vec.rs:84:5
+ --> $DIR/read_zero_byte_vec.rs:88:5
|
LL | r.read_exact(&mut data2).await.unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/readonly_write_lock.rs b/src/tools/clippy/tests/ui/readonly_write_lock.rs
new file mode 100644
index 000000000..656b45787
--- /dev/null
+++ b/src/tools/clippy/tests/ui/readonly_write_lock.rs
@@ -0,0 +1,42 @@
+#![warn(clippy::readonly_write_lock)]
+
+use std::sync::RwLock;
+
+fn mutate_i32(x: &mut i32) {
+ *x += 1;
+}
+
+fn accept_i32(_: i32) {}
+
+fn main() {
+ let lock = RwLock::new(42);
+ let lock2 = RwLock::new(1234);
+
+ {
+ let writer = lock.write().unwrap();
+ dbg!(&writer);
+ }
+
+ {
+ let writer = lock.write().unwrap();
+ accept_i32(*writer);
+ }
+
+ {
+ let mut writer = lock.write().unwrap();
+ mutate_i32(&mut writer);
+ dbg!(&writer);
+ }
+
+ {
+ let mut writer = lock.write().unwrap();
+ *writer += 1;
+ }
+
+ {
+ let mut writer1 = lock.write().unwrap();
+ let mut writer2 = lock2.write().unwrap();
+ *writer2 += 1;
+ *writer1 = *writer2;
+ }
+}
diff --git a/src/tools/clippy/tests/ui/readonly_write_lock.stderr b/src/tools/clippy/tests/ui/readonly_write_lock.stderr
new file mode 100644
index 000000000..e3d8fce7b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/readonly_write_lock.stderr
@@ -0,0 +1,16 @@
+error: this write lock is used only for reading
+ --> $DIR/readonly_write_lock.rs:16:22
+ |
+LL | let writer = lock.write().unwrap();
+ | ^^^^^^^^^^^^ help: consider using a read lock instead: `lock.read()`
+ |
+ = note: `-D clippy::readonly-write-lock` implied by `-D warnings`
+
+error: this write lock is used only for reading
+ --> $DIR/readonly_write_lock.rs:21:22
+ |
+LL | let writer = lock.write().unwrap();
+ | ^^^^^^^^^^^^ help: consider using a read lock instead: `lock.read()`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_allocation.rs b/src/tools/clippy/tests/ui/redundant_allocation.rs
index 574d34aed..9eb58a3e5 100644
--- a/src/tools/clippy/tests/ui/redundant_allocation.rs
+++ b/src/tools/clippy/tests/ui/redundant_allocation.rs
@@ -8,8 +8,7 @@ pub struct SubT<T> {
}
mod outer_box {
- use crate::MyStruct;
- use crate::SubT;
+ use crate::{MyStruct, SubT};
use std::boxed::Box;
use std::rc::Rc;
use std::sync::Arc;
@@ -28,8 +27,7 @@ mod outer_box {
}
mod outer_rc {
- use crate::MyStruct;
- use crate::SubT;
+ use crate::{MyStruct, SubT};
use std::boxed::Box;
use std::rc::Rc;
use std::sync::Arc;
@@ -48,8 +46,7 @@ mod outer_rc {
}
mod outer_arc {
- use crate::MyStruct;
- use crate::SubT;
+ use crate::{MyStruct, SubT};
use std::boxed::Box;
use std::rc::Rc;
use std::sync::Arc;
diff --git a/src/tools/clippy/tests/ui/redundant_allocation.stderr b/src/tools/clippy/tests/ui/redundant_allocation.stderr
index e0826fefa..a9a1eed70 100644
--- a/src/tools/clippy/tests/ui/redundant_allocation.stderr
+++ b/src/tools/clippy/tests/ui/redundant_allocation.stderr
@@ -1,5 +1,5 @@
error: usage of `Box<Rc<T>>`
- --> $DIR/redundant_allocation.rs:17:30
+ --> $DIR/redundant_allocation.rs:16:30
|
LL | pub fn box_test6<T>(foo: Box<Rc<T>>) {}
| ^^^^^^^^^^
@@ -9,7 +9,7 @@ LL | pub fn box_test6<T>(foo: Box<Rc<T>>) {}
= note: `-D clippy::redundant-allocation` implied by `-D warnings`
error: usage of `Box<Arc<T>>`
- --> $DIR/redundant_allocation.rs:19:30
+ --> $DIR/redundant_allocation.rs:18:30
|
LL | pub fn box_test7<T>(foo: Box<Arc<T>>) {}
| ^^^^^^^^^^^
@@ -18,7 +18,7 @@ LL | pub fn box_test7<T>(foo: Box<Arc<T>>) {}
= help: consider using just `Box<T>` or `Arc<T>`
error: usage of `Box<Rc<SubT<usize>>>`
- --> $DIR/redundant_allocation.rs:21:27
+ --> $DIR/redundant_allocation.rs:20:27
|
LL | pub fn box_test8() -> Box<Rc<SubT<usize>>> {
| ^^^^^^^^^^^^^^^^^^^^
@@ -27,7 +27,7 @@ LL | pub fn box_test8() -> Box<Rc<SubT<usize>>> {
= help: consider using just `Box<SubT<usize>>` or `Rc<SubT<usize>>`
error: usage of `Box<Arc<T>>`
- --> $DIR/redundant_allocation.rs:25:30
+ --> $DIR/redundant_allocation.rs:24:30
|
LL | pub fn box_test9<T>(foo: Box<Arc<T>>) -> Box<Arc<SubT<T>>> {
| ^^^^^^^^^^^
@@ -36,7 +36,7 @@ LL | pub fn box_test9<T>(foo: Box<Arc<T>>) -> Box<Arc<SubT<T>>> {
= help: consider using just `Box<T>` or `Arc<T>`
error: usage of `Box<Arc<SubT<T>>>`
- --> $DIR/redundant_allocation.rs:25:46
+ --> $DIR/redundant_allocation.rs:24:46
|
LL | pub fn box_test9<T>(foo: Box<Arc<T>>) -> Box<Arc<SubT<T>>> {
| ^^^^^^^^^^^^^^^^^
@@ -45,7 +45,7 @@ LL | pub fn box_test9<T>(foo: Box<Arc<T>>) -> Box<Arc<SubT<T>>> {
= help: consider using just `Box<SubT<T>>` or `Arc<SubT<T>>`
error: usage of `Rc<Box<bool>>`
- --> $DIR/redundant_allocation.rs:37:24
+ --> $DIR/redundant_allocation.rs:35:24
|
LL | pub fn rc_test5(a: Rc<Box<bool>>) {}
| ^^^^^^^^^^^^^
@@ -54,7 +54,7 @@ LL | pub fn rc_test5(a: Rc<Box<bool>>) {}
= help: consider using just `Rc<bool>` or `Box<bool>`
error: usage of `Rc<Arc<bool>>`
- --> $DIR/redundant_allocation.rs:39:24
+ --> $DIR/redundant_allocation.rs:37:24
|
LL | pub fn rc_test7(a: Rc<Arc<bool>>) {}
| ^^^^^^^^^^^^^
@@ -63,7 +63,7 @@ LL | pub fn rc_test7(a: Rc<Arc<bool>>) {}
= help: consider using just `Rc<bool>` or `Arc<bool>`
error: usage of `Rc<Box<SubT<usize>>>`
- --> $DIR/redundant_allocation.rs:41:26
+ --> $DIR/redundant_allocation.rs:39:26
|
LL | pub fn rc_test8() -> Rc<Box<SubT<usize>>> {
| ^^^^^^^^^^^^^^^^^^^^
@@ -72,7 +72,7 @@ LL | pub fn rc_test8() -> Rc<Box<SubT<usize>>> {
= help: consider using just `Rc<SubT<usize>>` or `Box<SubT<usize>>`
error: usage of `Rc<Arc<T>>`
- --> $DIR/redundant_allocation.rs:45:29
+ --> $DIR/redundant_allocation.rs:43:29
|
LL | pub fn rc_test9<T>(foo: Rc<Arc<T>>) -> Rc<Arc<SubT<T>>> {
| ^^^^^^^^^^
@@ -81,7 +81,7 @@ LL | pub fn rc_test9<T>(foo: Rc<Arc<T>>) -> Rc<Arc<SubT<T>>> {
= help: consider using just `Rc<T>` or `Arc<T>`
error: usage of `Rc<Arc<SubT<T>>>`
- --> $DIR/redundant_allocation.rs:45:44
+ --> $DIR/redundant_allocation.rs:43:44
|
LL | pub fn rc_test9<T>(foo: Rc<Arc<T>>) -> Rc<Arc<SubT<T>>> {
| ^^^^^^^^^^^^^^^^
@@ -90,7 +90,7 @@ LL | pub fn rc_test9<T>(foo: Rc<Arc<T>>) -> Rc<Arc<SubT<T>>> {
= help: consider using just `Rc<SubT<T>>` or `Arc<SubT<T>>`
error: usage of `Arc<Box<bool>>`
- --> $DIR/redundant_allocation.rs:57:25
+ --> $DIR/redundant_allocation.rs:54:25
|
LL | pub fn arc_test5(a: Arc<Box<bool>>) {}
| ^^^^^^^^^^^^^^
@@ -99,7 +99,7 @@ LL | pub fn arc_test5(a: Arc<Box<bool>>) {}
= help: consider using just `Arc<bool>` or `Box<bool>`
error: usage of `Arc<Rc<bool>>`
- --> $DIR/redundant_allocation.rs:59:25
+ --> $DIR/redundant_allocation.rs:56:25
|
LL | pub fn arc_test6(a: Arc<Rc<bool>>) {}
| ^^^^^^^^^^^^^
@@ -108,7 +108,7 @@ LL | pub fn arc_test6(a: Arc<Rc<bool>>) {}
= help: consider using just `Arc<bool>` or `Rc<bool>`
error: usage of `Arc<Box<SubT<usize>>>`
- --> $DIR/redundant_allocation.rs:61:27
+ --> $DIR/redundant_allocation.rs:58:27
|
LL | pub fn arc_test8() -> Arc<Box<SubT<usize>>> {
| ^^^^^^^^^^^^^^^^^^^^^
@@ -117,7 +117,7 @@ LL | pub fn arc_test8() -> Arc<Box<SubT<usize>>> {
= help: consider using just `Arc<SubT<usize>>` or `Box<SubT<usize>>`
error: usage of `Arc<Rc<T>>`
- --> $DIR/redundant_allocation.rs:65:30
+ --> $DIR/redundant_allocation.rs:62:30
|
LL | pub fn arc_test9<T>(foo: Arc<Rc<T>>) -> Arc<Rc<SubT<T>>> {
| ^^^^^^^^^^
@@ -126,7 +126,7 @@ LL | pub fn arc_test9<T>(foo: Arc<Rc<T>>) -> Arc<Rc<SubT<T>>> {
= help: consider using just `Arc<T>` or `Rc<T>`
error: usage of `Arc<Rc<SubT<T>>>`
- --> $DIR/redundant_allocation.rs:65:45
+ --> $DIR/redundant_allocation.rs:62:45
|
LL | pub fn arc_test9<T>(foo: Arc<Rc<T>>) -> Arc<Rc<SubT<T>>> {
| ^^^^^^^^^^^^^^^^
@@ -135,7 +135,7 @@ LL | pub fn arc_test9<T>(foo: Arc<Rc<T>>) -> Arc<Rc<SubT<T>>> {
= help: consider using just `Arc<SubT<T>>` or `Rc<SubT<T>>`
error: usage of `Rc<Box<Box<dyn T>>>`
- --> $DIR/redundant_allocation.rs:87:27
+ --> $DIR/redundant_allocation.rs:84:27
|
LL | pub fn test_rc_box(_: Rc<Box<Box<dyn T>>>) {}
| ^^^^^^^^^^^^^^^^^^^
@@ -144,7 +144,7 @@ LL | pub fn test_rc_box(_: Rc<Box<Box<dyn T>>>) {}
= help: consider using just `Rc<Box<dyn T>>` or `Box<Box<dyn T>>`
error: usage of `Rc<Box<Box<str>>>`
- --> $DIR/redundant_allocation.rs:119:31
+ --> $DIR/redundant_allocation.rs:116:31
|
LL | pub fn test_rc_box_str(_: Rc<Box<Box<str>>>) {}
| ^^^^^^^^^^^^^^^^^
@@ -153,7 +153,7 @@ LL | pub fn test_rc_box_str(_: Rc<Box<Box<str>>>) {}
= help: consider using just `Rc<Box<str>>` or `Box<Box<str>>`
error: usage of `Rc<Box<Box<[usize]>>>`
- --> $DIR/redundant_allocation.rs:120:33
+ --> $DIR/redundant_allocation.rs:117:33
|
LL | pub fn test_rc_box_slice(_: Rc<Box<Box<[usize]>>>) {}
| ^^^^^^^^^^^^^^^^^^^^^
@@ -162,7 +162,7 @@ LL | pub fn test_rc_box_slice(_: Rc<Box<Box<[usize]>>>) {}
= help: consider using just `Rc<Box<[usize]>>` or `Box<Box<[usize]>>`
error: usage of `Rc<Box<Box<Path>>>`
- --> $DIR/redundant_allocation.rs:121:32
+ --> $DIR/redundant_allocation.rs:118:32
|
LL | pub fn test_rc_box_path(_: Rc<Box<Box<Path>>>) {}
| ^^^^^^^^^^^^^^^^^^
@@ -171,7 +171,7 @@ LL | pub fn test_rc_box_path(_: Rc<Box<Box<Path>>>) {}
= help: consider using just `Rc<Box<Path>>` or `Box<Box<Path>>`
error: usage of `Rc<Box<Box<DynSized>>>`
- --> $DIR/redundant_allocation.rs:122:34
+ --> $DIR/redundant_allocation.rs:119:34
|
LL | pub fn test_rc_box_custom(_: Rc<Box<Box<DynSized>>>) {}
| ^^^^^^^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed b/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed
index edb7715f4..b97863daf 100644
--- a/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed
+++ b/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed
@@ -16,9 +16,7 @@ pub enum MyEnum {
}
mod outer_box {
- use crate::MyEnum;
- use crate::MyStruct;
- use crate::SubT;
+ use crate::{MyEnum, MyStruct, SubT};
use std::boxed::Box;
use std::rc::Rc;
use std::sync::Arc;
@@ -35,9 +33,7 @@ mod outer_box {
}
mod outer_rc {
- use crate::MyEnum;
- use crate::MyStruct;
- use crate::SubT;
+ use crate::{MyEnum, MyStruct, SubT};
use std::boxed::Box;
use std::rc::Rc;
use std::sync::Arc;
@@ -54,9 +50,7 @@ mod outer_rc {
}
mod outer_arc {
- use crate::MyEnum;
- use crate::MyStruct;
- use crate::SubT;
+ use crate::{MyEnum, MyStruct, SubT};
use std::boxed::Box;
use std::rc::Rc;
use std::sync::Arc;
diff --git a/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs b/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs
index c59422dd9..bffb6f8c0 100644
--- a/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs
+++ b/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs
@@ -16,9 +16,7 @@ pub enum MyEnum {
}
mod outer_box {
- use crate::MyEnum;
- use crate::MyStruct;
- use crate::SubT;
+ use crate::{MyEnum, MyStruct, SubT};
use std::boxed::Box;
use std::rc::Rc;
use std::sync::Arc;
@@ -35,9 +33,7 @@ mod outer_box {
}
mod outer_rc {
- use crate::MyEnum;
- use crate::MyStruct;
- use crate::SubT;
+ use crate::{MyEnum, MyStruct, SubT};
use std::boxed::Box;
use std::rc::Rc;
use std::sync::Arc;
@@ -54,9 +50,7 @@ mod outer_rc {
}
mod outer_arc {
- use crate::MyEnum;
- use crate::MyStruct;
- use crate::SubT;
+ use crate::{MyEnum, MyStruct, SubT};
use std::boxed::Box;
use std::rc::Rc;
use std::sync::Arc;
diff --git a/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr b/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr
index 8dd4a6a26..524ca5bf4 100644
--- a/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr
+++ b/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr
@@ -1,5 +1,5 @@
error: usage of `Box<&T>`
- --> $DIR/redundant_allocation_fixable.rs:26:30
+ --> $DIR/redundant_allocation_fixable.rs:24:30
|
LL | pub fn box_test1<T>(foo: Box<&T>) {}
| ^^^^^^^ help: try: `&T`
@@ -8,7 +8,7 @@ LL | pub fn box_test1<T>(foo: Box<&T>) {}
= note: `-D clippy::redundant-allocation` implied by `-D warnings`
error: usage of `Box<&MyStruct>`
- --> $DIR/redundant_allocation_fixable.rs:28:27
+ --> $DIR/redundant_allocation_fixable.rs:26:27
|
LL | pub fn box_test2(foo: Box<&MyStruct>) {}
| ^^^^^^^^^^^^^^ help: try: `&MyStruct`
@@ -16,7 +16,7 @@ LL | pub fn box_test2(foo: Box<&MyStruct>) {}
= note: `&MyStruct` is already a pointer, `Box<&MyStruct>` allocates a pointer on the heap
error: usage of `Box<&MyEnum>`
- --> $DIR/redundant_allocation_fixable.rs:30:27
+ --> $DIR/redundant_allocation_fixable.rs:28:27
|
LL | pub fn box_test3(foo: Box<&MyEnum>) {}
| ^^^^^^^^^^^^ help: try: `&MyEnum`
@@ -24,7 +24,7 @@ LL | pub fn box_test3(foo: Box<&MyEnum>) {}
= note: `&MyEnum` is already a pointer, `Box<&MyEnum>` allocates a pointer on the heap
error: usage of `Box<Box<T>>`
- --> $DIR/redundant_allocation_fixable.rs:34:30
+ --> $DIR/redundant_allocation_fixable.rs:32:30
|
LL | pub fn box_test5<T>(foo: Box<Box<T>>) {}
| ^^^^^^^^^^^ help: try: `Box<T>`
@@ -32,7 +32,7 @@ LL | pub fn box_test5<T>(foo: Box<Box<T>>) {}
= note: `Box<T>` is already on the heap, `Box<Box<T>>` makes an extra allocation
error: usage of `Rc<&T>`
- --> $DIR/redundant_allocation_fixable.rs:45:29
+ --> $DIR/redundant_allocation_fixable.rs:41:29
|
LL | pub fn rc_test1<T>(foo: Rc<&T>) {}
| ^^^^^^ help: try: `&T`
@@ -40,7 +40,7 @@ LL | pub fn rc_test1<T>(foo: Rc<&T>) {}
= note: `&T` is already a pointer, `Rc<&T>` allocates a pointer on the heap
error: usage of `Rc<&MyStruct>`
- --> $DIR/redundant_allocation_fixable.rs:47:26
+ --> $DIR/redundant_allocation_fixable.rs:43:26
|
LL | pub fn rc_test2(foo: Rc<&MyStruct>) {}
| ^^^^^^^^^^^^^ help: try: `&MyStruct`
@@ -48,7 +48,7 @@ LL | pub fn rc_test2(foo: Rc<&MyStruct>) {}
= note: `&MyStruct` is already a pointer, `Rc<&MyStruct>` allocates a pointer on the heap
error: usage of `Rc<&MyEnum>`
- --> $DIR/redundant_allocation_fixable.rs:49:26
+ --> $DIR/redundant_allocation_fixable.rs:45:26
|
LL | pub fn rc_test3(foo: Rc<&MyEnum>) {}
| ^^^^^^^^^^^ help: try: `&MyEnum`
@@ -56,7 +56,7 @@ LL | pub fn rc_test3(foo: Rc<&MyEnum>) {}
= note: `&MyEnum` is already a pointer, `Rc<&MyEnum>` allocates a pointer on the heap
error: usage of `Rc<Rc<bool>>`
- --> $DIR/redundant_allocation_fixable.rs:53:24
+ --> $DIR/redundant_allocation_fixable.rs:49:24
|
LL | pub fn rc_test6(a: Rc<Rc<bool>>) {}
| ^^^^^^^^^^^^ help: try: `Rc<bool>`
@@ -64,7 +64,7 @@ LL | pub fn rc_test6(a: Rc<Rc<bool>>) {}
= note: `Rc<bool>` is already on the heap, `Rc<Rc<bool>>` makes an extra allocation
error: usage of `Arc<&T>`
- --> $DIR/redundant_allocation_fixable.rs:64:30
+ --> $DIR/redundant_allocation_fixable.rs:58:30
|
LL | pub fn arc_test1<T>(foo: Arc<&T>) {}
| ^^^^^^^ help: try: `&T`
@@ -72,7 +72,7 @@ LL | pub fn arc_test1<T>(foo: Arc<&T>) {}
= note: `&T` is already a pointer, `Arc<&T>` allocates a pointer on the heap
error: usage of `Arc<&MyStruct>`
- --> $DIR/redundant_allocation_fixable.rs:66:27
+ --> $DIR/redundant_allocation_fixable.rs:60:27
|
LL | pub fn arc_test2(foo: Arc<&MyStruct>) {}
| ^^^^^^^^^^^^^^ help: try: `&MyStruct`
@@ -80,7 +80,7 @@ LL | pub fn arc_test2(foo: Arc<&MyStruct>) {}
= note: `&MyStruct` is already a pointer, `Arc<&MyStruct>` allocates a pointer on the heap
error: usage of `Arc<&MyEnum>`
- --> $DIR/redundant_allocation_fixable.rs:68:27
+ --> $DIR/redundant_allocation_fixable.rs:62:27
|
LL | pub fn arc_test3(foo: Arc<&MyEnum>) {}
| ^^^^^^^^^^^^ help: try: `&MyEnum`
@@ -88,7 +88,7 @@ LL | pub fn arc_test3(foo: Arc<&MyEnum>) {}
= note: `&MyEnum` is already a pointer, `Arc<&MyEnum>` allocates a pointer on the heap
error: usage of `Arc<Arc<bool>>`
- --> $DIR/redundant_allocation_fixable.rs:72:25
+ --> $DIR/redundant_allocation_fixable.rs:66:25
|
LL | pub fn arc_test7(a: Arc<Arc<bool>>) {}
| ^^^^^^^^^^^^^^ help: try: `Arc<bool>`
diff --git a/src/tools/clippy/tests/ui/redundant_guards.fixed b/src/tools/clippy/tests/ui/redundant_guards.fixed
new file mode 100644
index 000000000..49d7336ee
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_guards.fixed
@@ -0,0 +1,146 @@
+//@run-rustfix
+//@aux-build:proc_macros.rs:proc-macro
+#![feature(if_let_guard)]
+#![allow(clippy::no_effect, unused)]
+#![warn(clippy::redundant_guards)]
+
+#[macro_use]
+extern crate proc_macros;
+
+struct A(u32);
+
+struct B {
+ e: Option<A>,
+}
+
+struct C(u32, u32);
+
+#[derive(PartialEq)]
+struct FloatWrapper(f32);
+fn issue11304() {
+ match 0.1 {
+ x if x == 0.0 => todo!(),
+ _ => todo!(),
+ }
+ match FloatWrapper(0.1) {
+ x if x == FloatWrapper(0.0) => todo!(),
+ _ => todo!(),
+ }
+}
+
+fn main() {
+ let c = C(1, 2);
+ match c {
+ C(x, 1) => ..,
+ _ => todo!(),
+ };
+
+ let x = Some(Some(1));
+ match x {
+ Some(Some(1)) if true => ..,
+ Some(Some(1)) => {
+ println!("a");
+ ..
+ },
+ Some(Some(1)) => ..,
+ Some(Some(2)) => ..,
+ // Don't lint, since x is used in the body
+ Some(x) if let Some(1) = x => {
+ x;
+ ..
+ }
+ _ => todo!(),
+ };
+ let y = 1;
+ match x {
+ // Don't inline these, since y is not from the pat
+ Some(x) if matches!(y, 1 if true) => ..,
+ Some(x) if let 1 = y => ..,
+ Some(x) if y == 2 => ..,
+ _ => todo!(),
+ };
+ let a = A(1);
+ match a {
+ _ if a.0 == 1 => {},
+ _ => todo!(),
+ }
+ let b = B { e: Some(A(0)) };
+ match b {
+ B { e: Some(A(2)) } => ..,
+ _ => todo!(),
+ };
+ // Do not lint, since we cannot represent this as a pattern (at least, without a conversion)
+ let v = Some(vec![1u8, 2, 3]);
+ match v {
+ Some(x) if x == [1] => {},
+ _ => {},
+ }
+
+ external! {
+ let x = Some(Some(1));
+ match x {
+ Some(x) if let Some(1) = x => ..,
+ _ => todo!(),
+ };
+ }
+ with_span! {
+ span
+ let x = Some(Some(1));
+ match x {
+ Some(x) if let Some(1) = x => ..,
+ _ => todo!(),
+ };
+ }
+}
+
+enum E {
+ A(&'static str),
+ B(&'static str),
+ C(&'static str),
+}
+
+fn i() {
+ match E::A("") {
+ // Do not lint
+ E::A(x) | E::B(x) | E::C(x) if x == "from an or pattern" => {},
+ E::A("not from an or pattern") => {},
+ _ => {},
+ };
+}
+
+fn h(v: Option<u32>) {
+ match v {
+ Some(0) => ..,
+ _ => ..,
+ };
+}
+
+// Do not lint
+
+fn f(s: Option<std::ffi::OsString>) {
+ match s {
+ Some(x) if x == "a" => {},
+ _ => {},
+ }
+}
+
+struct S {
+ a: usize,
+}
+
+impl PartialEq for S {
+ fn eq(&self, _: &Self) -> bool {
+ true
+ }
+}
+
+impl Eq for S {}
+
+static CONST_S: S = S { a: 1 };
+
+fn g(opt_s: Option<S>) {
+ match opt_s {
+ Some(x) if x == CONST_S => {},
+ _ => {},
+ }
+}
diff --git a/src/tools/clippy/tests/ui/redundant_guards.rs b/src/tools/clippy/tests/ui/redundant_guards.rs
new file mode 100644
index 000000000..87761010d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_guards.rs
@@ -0,0 +1,146 @@
+//@run-rustfix
+//@aux-build:proc_macros.rs:proc-macro
+#![feature(if_let_guard)]
+#![allow(clippy::no_effect, unused)]
+#![warn(clippy::redundant_guards)]
+
+#[macro_use]
+extern crate proc_macros;
+
+struct A(u32);
+
+struct B {
+ e: Option<A>,
+}
+
+struct C(u32, u32);
+
+#[derive(PartialEq)]
+struct FloatWrapper(f32);
+fn issue11304() {
+ match 0.1 {
+ x if x == 0.0 => todo!(),
+ _ => todo!(),
+ }
+ match FloatWrapper(0.1) {
+ x if x == FloatWrapper(0.0) => todo!(),
+ _ => todo!(),
+ }
+}
+
+fn main() {
+ let c = C(1, 2);
+ match c {
+ C(x, y) if let 1 = y => ..,
+ _ => todo!(),
+ };
+
+ let x = Some(Some(1));
+ match x {
+ Some(x) if matches!(x, Some(1) if true) => ..,
+ Some(x) if matches!(x, Some(1)) => {
+ println!("a");
+ ..
+ },
+ Some(x) if let Some(1) = x => ..,
+ Some(x) if x == Some(2) => ..,
+ // Don't lint, since x is used in the body
+ Some(x) if let Some(1) = x => {
+ x;
+ ..
+ }
+ _ => todo!(),
+ };
+ let y = 1;
+ match x {
+ // Don't inline these, since y is not from the pat
+ Some(x) if matches!(y, 1 if true) => ..,
+ Some(x) if let 1 = y => ..,
+ Some(x) if y == 2 => ..,
+ _ => todo!(),
+ };
+ let a = A(1);
+ match a {
+ _ if a.0 == 1 => {},
+ _ => todo!(),
+ }
+ let b = B { e: Some(A(0)) };
+ match b {
+ B { e } if matches!(e, Some(A(2))) => ..,
+ _ => todo!(),
+ };
+ // Do not lint, since we cannot represent this as a pattern (at least, without a conversion)
+ let v = Some(vec![1u8, 2, 3]);
+ match v {
+ Some(x) if x == [1] => {},
+ _ => {},
+ }
+
+ external! {
+ let x = Some(Some(1));
+ match x {
+ Some(x) if let Some(1) = x => ..,
+ _ => todo!(),
+ };
+ }
+ with_span! {
+ span
+ let x = Some(Some(1));
+ match x {
+ Some(x) if let Some(1) = x => ..,
+ _ => todo!(),
+ };
+ }
+}
+
+enum E {
+ A(&'static str),
+ B(&'static str),
+ C(&'static str),
+}
+
+fn i() {
+ match E::A("") {
+ // Do not lint
+ E::A(x) | E::B(x) | E::C(x) if x == "from an or pattern" => {},
+ E::A(y) if y == "not from an or pattern" => {},
+ _ => {},
+ };
+}
+
+fn h(v: Option<u32>) {
+ match v {
+ x if matches!(x, Some(0)) => ..,
+ _ => ..,
+ };
+}
+
+// Do not lint
+
+fn f(s: Option<std::ffi::OsString>) {
+ match s {
+ Some(x) if x == "a" => {},
+ _ => {},
+ }
+}
+
+struct S {
+ a: usize,
+}
+
+impl PartialEq for S {
+ fn eq(&self, _: &Self) -> bool {
+ true
+ }
+}
+
+impl Eq for S {}
+
+static CONST_S: S = S { a: 1 };
+
+fn g(opt_s: Option<S>) {
+ match opt_s {
+ Some(x) if x == CONST_S => {},
+ _ => {},
+ }
+}
diff --git a/src/tools/clippy/tests/ui/redundant_guards.stderr b/src/tools/clippy/tests/ui/redundant_guards.stderr
new file mode 100644
index 000000000..5bdf43d23
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_guards.stderr
@@ -0,0 +1,98 @@
+error: redundant guard
+ --> $DIR/redundant_guards.rs:34:20
+ |
+LL | C(x, y) if let 1 = y => ..,
+ | ^^^^^^^^^
+ |
+ = note: `-D clippy::redundant-guards` implied by `-D warnings`
+help: try
+ |
+LL - C(x, y) if let 1 = y => ..,
+LL + C(x, 1) => ..,
+ |
+
+error: redundant guard
+ --> $DIR/redundant_guards.rs:40:20
+ |
+LL | Some(x) if matches!(x, Some(1) if true) => ..,
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL | Some(Some(1)) if true => ..,
+ | ~~~~~~~ ~~~~~~~
+
+error: redundant guard
+ --> $DIR/redundant_guards.rs:41:20
+ |
+LL | Some(x) if matches!(x, Some(1)) => {
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL - Some(x) if matches!(x, Some(1)) => {
+LL + Some(Some(1)) => {
+ |
+
+error: redundant guard
+ --> $DIR/redundant_guards.rs:45:20
+ |
+LL | Some(x) if let Some(1) = x => ..,
+ | ^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL - Some(x) if let Some(1) = x => ..,
+LL + Some(Some(1)) => ..,
+ |
+
+error: redundant guard
+ --> $DIR/redundant_guards.rs:46:20
+ |
+LL | Some(x) if x == Some(2) => ..,
+ | ^^^^^^^^^^^^
+ |
+help: try
+ |
+LL - Some(x) if x == Some(2) => ..,
+LL + Some(Some(2)) => ..,
+ |
+
+error: redundant guard
+ --> $DIR/redundant_guards.rs:69:20
+ |
+LL | B { e } if matches!(e, Some(A(2))) => ..,
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL - B { e } if matches!(e, Some(A(2))) => ..,
+LL + B { e: Some(A(2)) } => ..,
+ |
+
+error: redundant guard
+ --> $DIR/redundant_guards.rs:106:20
+ |
+LL | E::A(y) if y == "not from an or pattern" => {},
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL - E::A(y) if y == "not from an or pattern" => {},
+LL + E::A("not from an or pattern") => {},
+ |
+
+error: redundant guard
+ --> $DIR/redundant_guards.rs:113:14
+ |
+LL | x if matches!(x, Some(0)) => ..,
+ | ^^^^^^^^^^^^^^^^^^^^
+ |
+help: try
+ |
+LL - x if matches!(x, Some(0)) => ..,
+LL + Some(0) => ..,
+ |
+
+error: aborting due to 8 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_locals.rs b/src/tools/clippy/tests/ui/redundant_locals.rs
new file mode 100644
index 000000000..80af38f47
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_locals.rs
@@ -0,0 +1,120 @@
+//@aux-build:proc_macros.rs:proc-macro
+#![allow(unused, clippy::no_effect, clippy::needless_pass_by_ref_mut)]
+#![warn(clippy::redundant_locals)]
+
+extern crate proc_macros;
+use proc_macros::{external, with_span};
+
+fn main() {}
+
+fn immutable() {
+ let x = 1;
+ let x = x;
+}
+
+fn mutable() {
+ let mut x = 1;
+ let mut x = x;
+}
+
+fn upgraded_mutability() {
+ let x = 1;
+ let mut x = x;
+}
+
+fn downgraded_mutability() {
+ let mut x = 1;
+ let x = x;
+}
+
+// see #11290
+fn shadow_mutation() {
+ let mut x = 1;
+ {
+ let mut x = x;
+ x = 2;
+ }
+}
+
+fn coercion(par: &mut i32) {
+ let par: &i32 = par;
+
+ let x: &mut i32 = &mut 1;
+ let x: &i32 = x;
+}
+
+fn parameter(x: i32) {
+ let x = x;
+}
+
+fn many() {
+ let x = 1;
+ let x = x;
+ let x = x;
+ let x = x;
+ let x = x;
+}
+
+fn interleaved() {
+ let a = 1;
+ let b = 2;
+ let a = a;
+ let b = b;
+}
+
+fn block() {
+ {
+ let x = 1;
+ let x = x;
+ }
+}
+
+fn closure() {
+ || {
+ let x = 1;
+ let x = x;
+ };
+ |x: i32| {
+ let x = x;
+ };
+}
+
+fn consequential_drop_order() {
+ use std::sync::Mutex;
+
+ let mutex = Mutex::new(1);
+ let guard = mutex.lock().unwrap();
+
+ {
+ let guard = guard;
+ }
+}
+
+fn inconsequential_drop_order() {
+ let x = 1;
+
+ {
+ let x = x;
+ }
+}
+
+fn macros() {
+ macro_rules! rebind {
+ ($x:ident) => {
+ let $x = 1;
+ let $x = $x;
+ };
+ }
+
+ rebind!(x);
+
+ external! {
+ let x = 1;
+ let x = x;
+ }
+ with_span! {
+ span
+ let x = 1;
+ let x = x;
+ }
+}
diff --git a/src/tools/clippy/tests/ui/redundant_locals.stderr b/src/tools/clippy/tests/ui/redundant_locals.stderr
new file mode 100644
index 000000000..587de0575
--- /dev/null
+++ b/src/tools/clippy/tests/ui/redundant_locals.stderr
@@ -0,0 +1,136 @@
+error: redundant redefinition of a binding
+ --> $DIR/redundant_locals.rs:11:9
+ |
+LL | let x = 1;
+ | ^
+LL | let x = x;
+ | ^^^^^^^^^^
+ |
+ = help: remove the redefinition of `x`
+ = note: `-D clippy::redundant-locals` implied by `-D warnings`
+
+error: redundant redefinition of a binding
+ --> $DIR/redundant_locals.rs:16:9
+ |
+LL | let mut x = 1;
+ | ^^^^^
+LL | let mut x = x;
+ | ^^^^^^^^^^^^^^
+ |
+ = help: remove the redefinition of `x`
+
+error: redundant redefinition of a binding
+ --> $DIR/redundant_locals.rs:46:14
+ |
+LL | fn parameter(x: i32) {
+ | ^
+LL | let x = x;
+ | ^^^^^^^^^^
+ |
+ = help: remove the redefinition of `x`
+
+error: redundant redefinition of a binding
+ --> $DIR/redundant_locals.rs:51:9
+ |
+LL | let x = 1;
+ | ^
+LL | let x = x;
+ | ^^^^^^^^^^
+ |
+ = help: remove the redefinition of `x`
+
+error: redundant redefinition of a binding
+ --> $DIR/redundant_locals.rs:52:9
+ |
+LL | let x = x;
+ | ^
+LL | let x = x;
+ | ^^^^^^^^^^
+ |
+ = help: remove the redefinition of `x`
+
+error: redundant redefinition of a binding
+ --> $DIR/redundant_locals.rs:53:9
+ |
+LL | let x = x;
+ | ^
+LL | let x = x;
+ | ^^^^^^^^^^
+ |
+ = help: remove the redefinition of `x`
+
+error: redundant redefinition of a binding
+ --> $DIR/redundant_locals.rs:54:9
+ |
+LL | let x = x;
+ | ^
+LL | let x = x;
+ | ^^^^^^^^^^
+ |
+ = help: remove the redefinition of `x`
+
+error: redundant redefinition of a binding
+ --> $DIR/redundant_locals.rs:59:9
+ |
+LL | let a = 1;
+ | ^
+LL | let b = 2;
+LL | let a = a;
+ | ^^^^^^^^^^
+ |
+ = help: remove the redefinition of `a`
+
+error: redundant redefinition of a binding
+ --> $DIR/redundant_locals.rs:60:9
+ |
+LL | let b = 2;
+ | ^
+LL | let a = a;
+LL | let b = b;
+ | ^^^^^^^^^^
+ |
+ = help: remove the redefinition of `b`
+
+error: redundant redefinition of a binding
+ --> $DIR/redundant_locals.rs:67:13
+ |
+LL | let x = 1;
+ | ^
+LL | let x = x;
+ | ^^^^^^^^^^
+ |
+ = help: remove the redefinition of `x`
+
+error: redundant redefinition of a binding
+ --> $DIR/redundant_locals.rs:74:13
+ |
+LL | let x = 1;
+ | ^
+LL | let x = x;
+ | ^^^^^^^^^^
+ |
+ = help: remove the redefinition of `x`
+
+error: redundant redefinition of a binding
+ --> $DIR/redundant_locals.rs:77:6
+ |
+LL | |x: i32| {
+ | ^
+LL | let x = x;
+ | ^^^^^^^^^^
+ |
+ = help: remove the redefinition of `x`
+
+error: redundant redefinition of a binding
+ --> $DIR/redundant_locals.rs:94:9
+ |
+LL | let x = 1;
+ | ^
+...
+LL | let x = x;
+ | ^^^^^^^^^^
+ |
+ = help: remove the redefinition of `x`
+
+error: aborting due to 13 previous errors
+
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.stderr
index e9ea3f2e6..28f33f0c9 100644
--- a/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.stderr
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.stderr
@@ -2,7 +2,7 @@ error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_drop_order.rs:17:12
|
LL | if let Ok(_) = m.lock() {}
- | -------^^^^^----------- help: try this: `if m.lock().is_ok()`
+ | -------^^^^^----------- help: try: `if m.lock().is_ok()`
|
= note: this will change drop order of the result, as well as all temporaries
= note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
@@ -12,7 +12,7 @@ error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_drop_order.rs:18:12
|
LL | if let Err(_) = Err::<(), _>(m.lock().unwrap().0) {}
- | -------^^^^^^------------------------------------ help: try this: `if Err::<(), _>(m.lock().unwrap().0).is_err()`
+ | -------^^^^^^------------------------------------ help: try: `if Err::<(), _>(m.lock().unwrap().0).is_err()`
|
= note: this will change drop order of the result, as well as all temporaries
= note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
@@ -21,7 +21,7 @@ error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_drop_order.rs:21:16
|
LL | if let Ok(_) = Ok::<_, std::sync::MutexGuard<()>>(()) {}
- | -------^^^^^----------------------------------------- help: try this: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()`
+ | -------^^^^^----------------------------------------- help: try: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()`
|
= note: this will change drop order of the result, as well as all temporaries
= note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
@@ -30,7 +30,7 @@ error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_drop_order.rs:23:12
|
LL | if let Ok(_) = Ok::<_, std::sync::MutexGuard<()>>(()) {
- | -------^^^^^----------------------------------------- help: try this: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()`
+ | -------^^^^^----------------------------------------- help: try: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()`
|
= note: this will change drop order of the result, as well as all temporaries
= note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
@@ -39,31 +39,31 @@ error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_drop_order.rs:26:12
|
LL | if let Ok(_) = Ok::<_, std::sync::MutexGuard<()>>(()) {}
- | -------^^^^^----------------------------------------- help: try this: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()`
+ | -------^^^^^----------------------------------------- help: try: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()`
error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_drop_order.rs:27:12
|
LL | if let Err(_) = Err::<std::sync::MutexGuard<()>, _>(()) {}
- | -------^^^^^^------------------------------------------ help: try this: `if Err::<std::sync::MutexGuard<()>, _>(()).is_err()`
+ | -------^^^^^^------------------------------------------ help: try: `if Err::<std::sync::MutexGuard<()>, _>(()).is_err()`
error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_drop_order.rs:29:12
|
LL | if let Ok(_) = Ok::<_, ()>(String::new()) {}
- | -------^^^^^----------------------------- help: try this: `if Ok::<_, ()>(String::new()).is_ok()`
+ | -------^^^^^----------------------------- help: try: `if Ok::<_, ()>(String::new()).is_ok()`
error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_drop_order.rs:30:12
|
LL | if let Err(_) = Err::<(), _>((String::new(), ())) {}
- | -------^^^^^^------------------------------------ help: try this: `if Err::<(), _>((String::new(), ())).is_err()`
+ | -------^^^^^^------------------------------------ help: try: `if Err::<(), _>((String::new(), ())).is_err()`
error: redundant pattern matching, consider using `is_some()`
--> $DIR/redundant_pattern_matching_drop_order.rs:33:12
|
LL | if let Some(_) = Some(m.lock()) {}
- | -------^^^^^^^----------------- help: try this: `if Some(m.lock()).is_some()`
+ | -------^^^^^^^----------------- help: try: `if Some(m.lock()).is_some()`
|
= note: this will change drop order of the result, as well as all temporaries
= note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
@@ -72,7 +72,7 @@ error: redundant pattern matching, consider using `is_some()`
--> $DIR/redundant_pattern_matching_drop_order.rs:34:12
|
LL | if let Some(_) = Some(m.lock().unwrap().0) {}
- | -------^^^^^^^---------------------------- help: try this: `if Some(m.lock().unwrap().0).is_some()`
+ | -------^^^^^^^---------------------------- help: try: `if Some(m.lock().unwrap().0).is_some()`
|
= note: this will change drop order of the result, as well as all temporaries
= note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
@@ -81,7 +81,7 @@ error: redundant pattern matching, consider using `is_none()`
--> $DIR/redundant_pattern_matching_drop_order.rs:37:16
|
LL | if let None = None::<std::sync::MutexGuard<()>> {}
- | -------^^^^------------------------------------ help: try this: `if None::<std::sync::MutexGuard<()>>.is_none()`
+ | -------^^^^------------------------------------ help: try: `if None::<std::sync::MutexGuard<()>>.is_none()`
|
= note: this will change drop order of the result, as well as all temporaries
= note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
@@ -90,7 +90,7 @@ error: redundant pattern matching, consider using `is_none()`
--> $DIR/redundant_pattern_matching_drop_order.rs:39:12
|
LL | if let None = None::<std::sync::MutexGuard<()>> {
- | -------^^^^------------------------------------ help: try this: `if None::<std::sync::MutexGuard<()>>.is_none()`
+ | -------^^^^------------------------------------ help: try: `if None::<std::sync::MutexGuard<()>>.is_none()`
|
= note: this will change drop order of the result, as well as all temporaries
= note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
@@ -99,25 +99,25 @@ error: redundant pattern matching, consider using `is_none()`
--> $DIR/redundant_pattern_matching_drop_order.rs:43:12
|
LL | if let None = None::<std::sync::MutexGuard<()>> {}
- | -------^^^^------------------------------------ help: try this: `if None::<std::sync::MutexGuard<()>>.is_none()`
+ | -------^^^^------------------------------------ help: try: `if None::<std::sync::MutexGuard<()>>.is_none()`
error: redundant pattern matching, consider using `is_some()`
--> $DIR/redundant_pattern_matching_drop_order.rs:45:12
|
LL | if let Some(_) = Some(String::new()) {}
- | -------^^^^^^^---------------------- help: try this: `if Some(String::new()).is_some()`
+ | -------^^^^^^^---------------------- help: try: `if Some(String::new()).is_some()`
error: redundant pattern matching, consider using `is_some()`
--> $DIR/redundant_pattern_matching_drop_order.rs:46:12
|
LL | if let Some(_) = Some((String::new(), ())) {}
- | -------^^^^^^^---------------------------- help: try this: `if Some((String::new(), ())).is_some()`
+ | -------^^^^^^^---------------------------- help: try: `if Some((String::new(), ())).is_some()`
error: redundant pattern matching, consider using `is_ready()`
--> $DIR/redundant_pattern_matching_drop_order.rs:49:12
|
LL | if let Ready(_) = Ready(m.lock()) {}
- | -------^^^^^^^^------------------ help: try this: `if Ready(m.lock()).is_ready()`
+ | -------^^^^^^^^------------------ help: try: `if Ready(m.lock()).is_ready()`
|
= note: this will change drop order of the result, as well as all temporaries
= note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
@@ -126,7 +126,7 @@ error: redundant pattern matching, consider using `is_ready()`
--> $DIR/redundant_pattern_matching_drop_order.rs:50:12
|
LL | if let Ready(_) = Ready(m.lock().unwrap().0) {}
- | -------^^^^^^^^----------------------------- help: try this: `if Ready(m.lock().unwrap().0).is_ready()`
+ | -------^^^^^^^^----------------------------- help: try: `if Ready(m.lock().unwrap().0).is_ready()`
|
= note: this will change drop order of the result, as well as all temporaries
= note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
@@ -135,7 +135,7 @@ error: redundant pattern matching, consider using `is_pending()`
--> $DIR/redundant_pattern_matching_drop_order.rs:53:16
|
LL | if let Pending = Pending::<std::sync::MutexGuard<()>> {}
- | -------^^^^^^^--------------------------------------- help: try this: `if Pending::<std::sync::MutexGuard<()>>.is_pending()`
+ | -------^^^^^^^--------------------------------------- help: try: `if Pending::<std::sync::MutexGuard<()>>.is_pending()`
|
= note: this will change drop order of the result, as well as all temporaries
= note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
@@ -144,7 +144,7 @@ error: redundant pattern matching, consider using `is_pending()`
--> $DIR/redundant_pattern_matching_drop_order.rs:55:12
|
LL | if let Pending = Pending::<std::sync::MutexGuard<()>> {
- | -------^^^^^^^--------------------------------------- help: try this: `if Pending::<std::sync::MutexGuard<()>>.is_pending()`
+ | -------^^^^^^^--------------------------------------- help: try: `if Pending::<std::sync::MutexGuard<()>>.is_pending()`
|
= note: this will change drop order of the result, as well as all temporaries
= note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important
@@ -153,19 +153,19 @@ error: redundant pattern matching, consider using `is_pending()`
--> $DIR/redundant_pattern_matching_drop_order.rs:59:12
|
LL | if let Pending = Pending::<std::sync::MutexGuard<()>> {}
- | -------^^^^^^^--------------------------------------- help: try this: `if Pending::<std::sync::MutexGuard<()>>.is_pending()`
+ | -------^^^^^^^--------------------------------------- help: try: `if Pending::<std::sync::MutexGuard<()>>.is_pending()`
error: redundant pattern matching, consider using `is_ready()`
--> $DIR/redundant_pattern_matching_drop_order.rs:61:12
|
LL | if let Ready(_) = Ready(String::new()) {}
- | -------^^^^^^^^----------------------- help: try this: `if Ready(String::new()).is_ready()`
+ | -------^^^^^^^^----------------------- help: try: `if Ready(String::new()).is_ready()`
error: redundant pattern matching, consider using `is_ready()`
--> $DIR/redundant_pattern_matching_drop_order.rs:62:12
|
LL | if let Ready(_) = Ready((String::new(), ())) {}
- | -------^^^^^^^^----------------------------- help: try this: `if Ready((String::new(), ())).is_ready()`
+ | -------^^^^^^^^----------------------------- help: try: `if Ready((String::new(), ())).is_ready()`
error: aborting due to 22 previous errors
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed
index 75ed14344..02f197aa2 100644
--- a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed
@@ -8,10 +8,8 @@
clippy::uninlined_format_args
)]
-use std::net::{
- IpAddr::{self, V4, V6},
- Ipv4Addr, Ipv6Addr,
-};
+use std::net::IpAddr::{self, V4, V6};
+use std::net::{Ipv4Addr, Ipv6Addr};
fn main() {
let ipaddr: IpAddr = V4(Ipv4Addr::LOCALHOST);
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs
index 9ac77409f..5c1e1810f 100644
--- a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs
@@ -8,10 +8,8 @@
clippy::uninlined_format_args
)]
-use std::net::{
- IpAddr::{self, V4, V6},
- Ipv4Addr, Ipv6Addr,
-};
+use std::net::IpAddr::{self, V4, V6};
+use std::net::{Ipv4Addr, Ipv6Addr};
fn main() {
let ipaddr: IpAddr = V4(Ipv4Addr::LOCALHOST);
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr
index 6d1fb2964..bec8d3088 100644
--- a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr
@@ -1,130 +1,130 @@
error: redundant pattern matching, consider using `is_ipv4()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:18:12
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:16:12
|
LL | if let V4(_) = &ipaddr {}
- | -------^^^^^---------- help: try this: `if ipaddr.is_ipv4()`
+ | -------^^^^^---------- help: try: `if ipaddr.is_ipv4()`
|
= note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
error: redundant pattern matching, consider using `is_ipv4()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:20:12
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:18:12
|
LL | if let V4(_) = V4(Ipv4Addr::LOCALHOST) {}
- | -------^^^^^-------------------------- help: try this: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+ | -------^^^^^-------------------------- help: try: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()`
error: redundant pattern matching, consider using `is_ipv6()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:22:12
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:20:12
|
LL | if let V6(_) = V6(Ipv6Addr::LOCALHOST) {}
- | -------^^^^^-------------------------- help: try this: `if V6(Ipv6Addr::LOCALHOST).is_ipv6()`
+ | -------^^^^^-------------------------- help: try: `if V6(Ipv6Addr::LOCALHOST).is_ipv6()`
error: redundant pattern matching, consider using `is_ipv4()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:24:15
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:22:15
|
LL | while let V4(_) = V4(Ipv4Addr::LOCALHOST) {}
- | ----------^^^^^-------------------------- help: try this: `while V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+ | ----------^^^^^-------------------------- help: try: `while V4(Ipv4Addr::LOCALHOST).is_ipv4()`
error: redundant pattern matching, consider using `is_ipv6()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:26:15
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:24:15
|
LL | while let V6(_) = V6(Ipv6Addr::LOCALHOST) {}
- | ----------^^^^^-------------------------- help: try this: `while V6(Ipv6Addr::LOCALHOST).is_ipv6()`
+ | ----------^^^^^-------------------------- help: try: `while V6(Ipv6Addr::LOCALHOST).is_ipv6()`
error: redundant pattern matching, consider using `is_ipv4()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:36:5
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:34:5
|
LL | / match V4(Ipv4Addr::LOCALHOST) {
LL | | V4(_) => true,
LL | | V6(_) => false,
LL | | };
- | |_____^ help: try this: `V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+ | |_____^ help: try: `V4(Ipv4Addr::LOCALHOST).is_ipv4()`
error: redundant pattern matching, consider using `is_ipv6()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:41:5
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:39:5
|
LL | / match V4(Ipv4Addr::LOCALHOST) {
LL | | V4(_) => false,
LL | | V6(_) => true,
LL | | };
- | |_____^ help: try this: `V4(Ipv4Addr::LOCALHOST).is_ipv6()`
+ | |_____^ help: try: `V4(Ipv4Addr::LOCALHOST).is_ipv6()`
error: redundant pattern matching, consider using `is_ipv6()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:46:5
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:44:5
|
LL | / match V6(Ipv6Addr::LOCALHOST) {
LL | | V4(_) => false,
LL | | V6(_) => true,
LL | | };
- | |_____^ help: try this: `V6(Ipv6Addr::LOCALHOST).is_ipv6()`
+ | |_____^ help: try: `V6(Ipv6Addr::LOCALHOST).is_ipv6()`
error: redundant pattern matching, consider using `is_ipv4()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:51:5
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:49:5
|
LL | / match V6(Ipv6Addr::LOCALHOST) {
LL | | V4(_) => true,
LL | | V6(_) => false,
LL | | };
- | |_____^ help: try this: `V6(Ipv6Addr::LOCALHOST).is_ipv4()`
+ | |_____^ help: try: `V6(Ipv6Addr::LOCALHOST).is_ipv4()`
error: redundant pattern matching, consider using `is_ipv4()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:56:20
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:54:20
|
LL | let _ = if let V4(_) = V4(Ipv4Addr::LOCALHOST) {
- | -------^^^^^-------------------------- help: try this: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+ | -------^^^^^-------------------------- help: try: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()`
error: redundant pattern matching, consider using `is_ipv4()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:64:20
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:62:20
|
LL | let _ = if let V4(_) = gen_ipaddr() {
- | -------^^^^^--------------- help: try this: `if gen_ipaddr().is_ipv4()`
+ | -------^^^^^--------------- help: try: `if gen_ipaddr().is_ipv4()`
error: redundant pattern matching, consider using `is_ipv6()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:66:19
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:64:19
|
LL | } else if let V6(_) = gen_ipaddr() {
- | -------^^^^^--------------- help: try this: `if gen_ipaddr().is_ipv6()`
+ | -------^^^^^--------------- help: try: `if gen_ipaddr().is_ipv6()`
error: redundant pattern matching, consider using `is_ipv4()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:78:12
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:76:12
|
LL | if let V4(_) = V4(Ipv4Addr::LOCALHOST) {}
- | -------^^^^^-------------------------- help: try this: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+ | -------^^^^^-------------------------- help: try: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()`
error: redundant pattern matching, consider using `is_ipv6()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:80:12
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:78:12
|
LL | if let V6(_) = V6(Ipv6Addr::LOCALHOST) {}
- | -------^^^^^-------------------------- help: try this: `if V6(Ipv6Addr::LOCALHOST).is_ipv6()`
+ | -------^^^^^-------------------------- help: try: `if V6(Ipv6Addr::LOCALHOST).is_ipv6()`
error: redundant pattern matching, consider using `is_ipv4()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:82:15
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:80:15
|
LL | while let V4(_) = V4(Ipv4Addr::LOCALHOST) {}
- | ----------^^^^^-------------------------- help: try this: `while V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+ | ----------^^^^^-------------------------- help: try: `while V4(Ipv4Addr::LOCALHOST).is_ipv4()`
error: redundant pattern matching, consider using `is_ipv6()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:84:15
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:82:15
|
LL | while let V6(_) = V6(Ipv6Addr::LOCALHOST) {}
- | ----------^^^^^-------------------------- help: try this: `while V6(Ipv6Addr::LOCALHOST).is_ipv6()`
+ | ----------^^^^^-------------------------- help: try: `while V6(Ipv6Addr::LOCALHOST).is_ipv6()`
error: redundant pattern matching, consider using `is_ipv4()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:86:5
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:84:5
|
LL | / match V4(Ipv4Addr::LOCALHOST) {
LL | | V4(_) => true,
LL | | V6(_) => false,
LL | | };
- | |_____^ help: try this: `V4(Ipv4Addr::LOCALHOST).is_ipv4()`
+ | |_____^ help: try: `V4(Ipv4Addr::LOCALHOST).is_ipv4()`
error: redundant pattern matching, consider using `is_ipv6()`
- --> $DIR/redundant_pattern_matching_ipaddr.rs:91:5
+ --> $DIR/redundant_pattern_matching_ipaddr.rs:89:5
|
LL | / match V6(Ipv6Addr::LOCALHOST) {
LL | | V4(_) => false,
LL | | V6(_) => true,
LL | | };
- | |_____^ help: try this: `V6(Ipv6Addr::LOCALHOST).is_ipv6()`
+ | |_____^ help: try: `V6(Ipv6Addr::LOCALHOST).is_ipv6()`
error: aborting due to 18 previous errors
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.fixed b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.fixed
index a63ba5809..d9fcd98c5 100644
--- a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.fixed
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.fixed
@@ -10,6 +10,20 @@
clippy::equatable_if_let,
clippy::if_same_then_else
)]
+#![feature(let_chains, if_let_guard)]
+
+fn issue_11174<T>(boolean: bool, maybe_some: Option<T>) -> bool {
+ maybe_some.is_none() && (!boolean)
+}
+
+fn issue_11174_edge_cases<T>(boolean: bool, boolean2: bool, maybe_some: Option<T>) {
+ let _ = maybe_some.is_none() && (boolean || boolean2); // guard needs parentheses
+ let _ = match maybe_some { // can't use `matches!` here
+ // because `expr` metavars in macros don't allow let exprs
+ None if let Some(x) = Some(0) && x > 5 => true,
+ _ => false
+ };
+}
fn main() {
if None::<()>.is_none() {}
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.rs b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.rs
index 631f90916..cbd9494f1 100644
--- a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.rs
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.rs
@@ -10,6 +10,20 @@
clippy::equatable_if_let,
clippy::if_same_then_else
)]
+#![feature(let_chains, if_let_guard)]
+
+fn issue_11174<T>(boolean: bool, maybe_some: Option<T>) -> bool {
+ matches!(maybe_some, None if !boolean)
+}
+
+fn issue_11174_edge_cases<T>(boolean: bool, boolean2: bool, maybe_some: Option<T>) {
+ let _ = matches!(maybe_some, None if boolean || boolean2); // guard needs parentheses
+ let _ = match maybe_some { // can't use `matches!` here
+ // because `expr` metavars in macros don't allow let exprs
+ None if let Some(x) = Some(0) && x > 5 => true,
+ _ => false
+ };
+}
fn main() {
if let None = None::<()> {}
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr
index 717b603c4..b0e43924d 100644
--- a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr
@@ -1,200 +1,212 @@
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:15:12
+ --> $DIR/redundant_pattern_matching_option.rs:16:5
|
-LL | if let None = None::<()> {}
- | -------^^^^------------- help: try this: `if None::<()>.is_none()`
+LL | matches!(maybe_some, None if !boolean)
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `maybe_some.is_none() && (!boolean)`
|
= note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_option.rs:20:13
+ |
+LL | let _ = matches!(maybe_some, None if boolean || boolean2); // guard needs parentheses
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `maybe_some.is_none() && (boolean || boolean2)`
+
+error: redundant pattern matching, consider using `is_none()`
+ --> $DIR/redundant_pattern_matching_option.rs:29:12
+ |
+LL | if let None = None::<()> {}
+ | -------^^^^------------- help: try: `if None::<()>.is_none()`
+
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:17:12
+ --> $DIR/redundant_pattern_matching_option.rs:31:12
|
LL | if let Some(_) = Some(42) {}
- | -------^^^^^^^----------- help: try this: `if Some(42).is_some()`
+ | -------^^^^^^^----------- help: try: `if Some(42).is_some()`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:19:12
+ --> $DIR/redundant_pattern_matching_option.rs:33:12
|
LL | if let Some(_) = Some(42) {
- | -------^^^^^^^----------- help: try this: `if Some(42).is_some()`
+ | -------^^^^^^^----------- help: try: `if Some(42).is_some()`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:25:15
+ --> $DIR/redundant_pattern_matching_option.rs:39:15
|
LL | while let Some(_) = Some(42) {}
- | ----------^^^^^^^----------- help: try this: `while Some(42).is_some()`
+ | ----------^^^^^^^----------- help: try: `while Some(42).is_some()`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:27:15
+ --> $DIR/redundant_pattern_matching_option.rs:41:15
|
LL | while let None = Some(42) {}
- | ----------^^^^----------- help: try this: `while Some(42).is_none()`
+ | ----------^^^^----------- help: try: `while Some(42).is_none()`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:29:15
+ --> $DIR/redundant_pattern_matching_option.rs:43:15
|
LL | while let None = None::<()> {}
- | ----------^^^^------------- help: try this: `while None::<()>.is_none()`
+ | ----------^^^^------------- help: try: `while None::<()>.is_none()`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:32:15
+ --> $DIR/redundant_pattern_matching_option.rs:46:15
|
LL | while let Some(_) = v.pop() {
- | ----------^^^^^^^---------- help: try this: `while v.pop().is_some()`
+ | ----------^^^^^^^---------- help: try: `while v.pop().is_some()`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:40:5
+ --> $DIR/redundant_pattern_matching_option.rs:54:5
|
LL | / match Some(42) {
LL | | Some(_) => true,
LL | | None => false,
LL | | };
- | |_____^ help: try this: `Some(42).is_some()`
+ | |_____^ help: try: `Some(42).is_some()`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:45:5
+ --> $DIR/redundant_pattern_matching_option.rs:59:5
|
LL | / match None::<()> {
LL | | Some(_) => false,
LL | | None => true,
LL | | };
- | |_____^ help: try this: `None::<()>.is_none()`
+ | |_____^ help: try: `None::<()>.is_none()`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:50:13
+ --> $DIR/redundant_pattern_matching_option.rs:64:13
|
LL | let _ = match None::<()> {
| _____________^
LL | | Some(_) => false,
LL | | None => true,
LL | | };
- | |_____^ help: try this: `None::<()>.is_none()`
+ | |_____^ help: try: `None::<()>.is_none()`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:56:20
+ --> $DIR/redundant_pattern_matching_option.rs:70:20
|
LL | let _ = if let Some(_) = opt { true } else { false };
- | -------^^^^^^^------ help: try this: `if opt.is_some()`
+ | -------^^^^^^^------ help: try: `if opt.is_some()`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:62:20
+ --> $DIR/redundant_pattern_matching_option.rs:76:20
|
LL | let _ = if let Some(_) = gen_opt() {
- | -------^^^^^^^------------ help: try this: `if gen_opt().is_some()`
+ | -------^^^^^^^------------ help: try: `if gen_opt().is_some()`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:64:19
+ --> $DIR/redundant_pattern_matching_option.rs:78:19
|
LL | } else if let None = gen_opt() {
- | -------^^^^------------ help: try this: `if gen_opt().is_none()`
+ | -------^^^^------------ help: try: `if gen_opt().is_none()`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:70:12
+ --> $DIR/redundant_pattern_matching_option.rs:84:12
|
LL | if let Some(..) = gen_opt() {}
- | -------^^^^^^^^------------ help: try this: `if gen_opt().is_some()`
+ | -------^^^^^^^^------------ help: try: `if gen_opt().is_some()`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:85:12
+ --> $DIR/redundant_pattern_matching_option.rs:99:12
|
LL | if let Some(_) = Some(42) {}
- | -------^^^^^^^----------- help: try this: `if Some(42).is_some()`
+ | -------^^^^^^^----------- help: try: `if Some(42).is_some()`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:87:12
+ --> $DIR/redundant_pattern_matching_option.rs:101:12
|
LL | if let None = None::<()> {}
- | -------^^^^------------- help: try this: `if None::<()>.is_none()`
+ | -------^^^^------------- help: try: `if None::<()>.is_none()`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:89:15
+ --> $DIR/redundant_pattern_matching_option.rs:103:15
|
LL | while let Some(_) = Some(42) {}
- | ----------^^^^^^^----------- help: try this: `while Some(42).is_some()`
+ | ----------^^^^^^^----------- help: try: `while Some(42).is_some()`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:91:15
+ --> $DIR/redundant_pattern_matching_option.rs:105:15
|
LL | while let None = None::<()> {}
- | ----------^^^^------------- help: try this: `while None::<()>.is_none()`
+ | ----------^^^^------------- help: try: `while None::<()>.is_none()`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:93:5
+ --> $DIR/redundant_pattern_matching_option.rs:107:5
|
LL | / match Some(42) {
LL | | Some(_) => true,
LL | | None => false,
LL | | };
- | |_____^ help: try this: `Some(42).is_some()`
+ | |_____^ help: try: `Some(42).is_some()`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:98:5
+ --> $DIR/redundant_pattern_matching_option.rs:112:5
|
LL | / match None::<()> {
LL | | Some(_) => false,
LL | | None => true,
LL | | };
- | |_____^ help: try this: `None::<()>.is_none()`
+ | |_____^ help: try: `None::<()>.is_none()`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:106:12
+ --> $DIR/redundant_pattern_matching_option.rs:120:12
|
LL | if let None = *(&None::<()>) {}
- | -------^^^^----------------- help: try this: `if (&None::<()>).is_none()`
+ | -------^^^^----------------- help: try: `if (&None::<()>).is_none()`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:107:12
+ --> $DIR/redundant_pattern_matching_option.rs:121:12
|
LL | if let None = *&None::<()> {}
- | -------^^^^--------------- help: try this: `if (&None::<()>).is_none()`
+ | -------^^^^--------------- help: try: `if (&None::<()>).is_none()`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:113:5
+ --> $DIR/redundant_pattern_matching_option.rs:127:5
|
LL | / match x {
LL | | Some(_) => true,
LL | | _ => false,
LL | | };
- | |_____^ help: try this: `x.is_some()`
+ | |_____^ help: try: `x.is_some()`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:118:5
+ --> $DIR/redundant_pattern_matching_option.rs:132:5
|
LL | / match x {
LL | | None => true,
LL | | _ => false,
LL | | };
- | |_____^ help: try this: `x.is_none()`
+ | |_____^ help: try: `x.is_none()`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:123:5
+ --> $DIR/redundant_pattern_matching_option.rs:137:5
|
LL | / match x {
LL | | Some(_) => false,
LL | | _ => true,
LL | | };
- | |_____^ help: try this: `x.is_none()`
+ | |_____^ help: try: `x.is_none()`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:128:5
+ --> $DIR/redundant_pattern_matching_option.rs:142:5
|
LL | / match x {
LL | | None => false,
LL | | _ => true,
LL | | };
- | |_____^ help: try this: `x.is_some()`
+ | |_____^ help: try: `x.is_some()`
error: redundant pattern matching, consider using `is_some()`
- --> $DIR/redundant_pattern_matching_option.rs:143:13
+ --> $DIR/redundant_pattern_matching_option.rs:157:13
|
LL | let _ = matches!(x, Some(_));
- | ^^^^^^^^^^^^^^^^^^^^ help: try this: `x.is_some()`
+ | ^^^^^^^^^^^^^^^^^^^^ help: try: `x.is_some()`
error: redundant pattern matching, consider using `is_none()`
- --> $DIR/redundant_pattern_matching_option.rs:145:13
+ --> $DIR/redundant_pattern_matching_option.rs:159:13
|
LL | let _ = matches!(x, None);
- | ^^^^^^^^^^^^^^^^^ help: try this: `x.is_none()`
+ | ^^^^^^^^^^^^^^^^^ help: try: `x.is_none()`
-error: aborting due to 28 previous errors
+error: aborting due to 30 previous errors
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr
index b89fde35f..28d3606c4 100644
--- a/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr
@@ -2,7 +2,7 @@ error: redundant pattern matching, consider using `is_pending()`
--> $DIR/redundant_pattern_matching_poll.rs:17:12
|
LL | if let Pending = Pending::<()> {}
- | -------^^^^^^^---------------- help: try this: `if Pending::<()>.is_pending()`
+ | -------^^^^^^^---------------- help: try: `if Pending::<()>.is_pending()`
|
= note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
@@ -10,31 +10,31 @@ error: redundant pattern matching, consider using `is_ready()`
--> $DIR/redundant_pattern_matching_poll.rs:19:12
|
LL | if let Ready(_) = Ready(42) {}
- | -------^^^^^^^^------------ help: try this: `if Ready(42).is_ready()`
+ | -------^^^^^^^^------------ help: try: `if Ready(42).is_ready()`
error: redundant pattern matching, consider using `is_ready()`
--> $DIR/redundant_pattern_matching_poll.rs:21:12
|
LL | if let Ready(_) = Ready(42) {
- | -------^^^^^^^^------------ help: try this: `if Ready(42).is_ready()`
+ | -------^^^^^^^^------------ help: try: `if Ready(42).is_ready()`
error: redundant pattern matching, consider using `is_ready()`
--> $DIR/redundant_pattern_matching_poll.rs:27:15
|
LL | while let Ready(_) = Ready(42) {}
- | ----------^^^^^^^^------------ help: try this: `while Ready(42).is_ready()`
+ | ----------^^^^^^^^------------ help: try: `while Ready(42).is_ready()`
error: redundant pattern matching, consider using `is_pending()`
--> $DIR/redundant_pattern_matching_poll.rs:29:15
|
LL | while let Pending = Ready(42) {}
- | ----------^^^^^^^------------ help: try this: `while Ready(42).is_pending()`
+ | ----------^^^^^^^------------ help: try: `while Ready(42).is_pending()`
error: redundant pattern matching, consider using `is_pending()`
--> $DIR/redundant_pattern_matching_poll.rs:31:15
|
LL | while let Pending = Pending::<()> {}
- | ----------^^^^^^^---------------- help: try this: `while Pending::<()>.is_pending()`
+ | ----------^^^^^^^---------------- help: try: `while Pending::<()>.is_pending()`
error: redundant pattern matching, consider using `is_ready()`
--> $DIR/redundant_pattern_matching_poll.rs:37:5
@@ -43,7 +43,7 @@ LL | / match Ready(42) {
LL | | Ready(_) => true,
LL | | Pending => false,
LL | | };
- | |_____^ help: try this: `Ready(42).is_ready()`
+ | |_____^ help: try: `Ready(42).is_ready()`
error: redundant pattern matching, consider using `is_pending()`
--> $DIR/redundant_pattern_matching_poll.rs:42:5
@@ -52,7 +52,7 @@ LL | / match Pending::<()> {
LL | | Ready(_) => false,
LL | | Pending => true,
LL | | };
- | |_____^ help: try this: `Pending::<()>.is_pending()`
+ | |_____^ help: try: `Pending::<()>.is_pending()`
error: redundant pattern matching, consider using `is_pending()`
--> $DIR/redundant_pattern_matching_poll.rs:47:13
@@ -62,49 +62,49 @@ LL | let _ = match Pending::<()> {
LL | | Ready(_) => false,
LL | | Pending => true,
LL | | };
- | |_____^ help: try this: `Pending::<()>.is_pending()`
+ | |_____^ help: try: `Pending::<()>.is_pending()`
error: redundant pattern matching, consider using `is_ready()`
--> $DIR/redundant_pattern_matching_poll.rs:53:20
|
LL | let _ = if let Ready(_) = poll { true } else { false };
- | -------^^^^^^^^------- help: try this: `if poll.is_ready()`
+ | -------^^^^^^^^------- help: try: `if poll.is_ready()`
error: redundant pattern matching, consider using `is_ready()`
--> $DIR/redundant_pattern_matching_poll.rs:57:20
|
LL | let _ = if let Ready(_) = gen_poll() {
- | -------^^^^^^^^------------- help: try this: `if gen_poll().is_ready()`
+ | -------^^^^^^^^------------- help: try: `if gen_poll().is_ready()`
error: redundant pattern matching, consider using `is_pending()`
--> $DIR/redundant_pattern_matching_poll.rs:59:19
|
LL | } else if let Pending = gen_poll() {
- | -------^^^^^^^------------- help: try this: `if gen_poll().is_pending()`
+ | -------^^^^^^^------------- help: try: `if gen_poll().is_pending()`
error: redundant pattern matching, consider using `is_ready()`
--> $DIR/redundant_pattern_matching_poll.rs:75:12
|
LL | if let Ready(_) = Ready(42) {}
- | -------^^^^^^^^------------ help: try this: `if Ready(42).is_ready()`
+ | -------^^^^^^^^------------ help: try: `if Ready(42).is_ready()`
error: redundant pattern matching, consider using `is_pending()`
--> $DIR/redundant_pattern_matching_poll.rs:77:12
|
LL | if let Pending = Pending::<()> {}
- | -------^^^^^^^---------------- help: try this: `if Pending::<()>.is_pending()`
+ | -------^^^^^^^---------------- help: try: `if Pending::<()>.is_pending()`
error: redundant pattern matching, consider using `is_ready()`
--> $DIR/redundant_pattern_matching_poll.rs:79:15
|
LL | while let Ready(_) = Ready(42) {}
- | ----------^^^^^^^^------------ help: try this: `while Ready(42).is_ready()`
+ | ----------^^^^^^^^------------ help: try: `while Ready(42).is_ready()`
error: redundant pattern matching, consider using `is_pending()`
--> $DIR/redundant_pattern_matching_poll.rs:81:15
|
LL | while let Pending = Pending::<()> {}
- | ----------^^^^^^^---------------- help: try this: `while Pending::<()>.is_pending()`
+ | ----------^^^^^^^---------------- help: try: `while Pending::<()>.is_pending()`
error: redundant pattern matching, consider using `is_ready()`
--> $DIR/redundant_pattern_matching_poll.rs:83:5
@@ -113,7 +113,7 @@ LL | / match Ready(42) {
LL | | Ready(_) => true,
LL | | Pending => false,
LL | | };
- | |_____^ help: try this: `Ready(42).is_ready()`
+ | |_____^ help: try: `Ready(42).is_ready()`
error: redundant pattern matching, consider using `is_pending()`
--> $DIR/redundant_pattern_matching_poll.rs:88:5
@@ -122,7 +122,7 @@ LL | / match Pending::<()> {
LL | | Ready(_) => false,
LL | | Pending => true,
LL | | };
- | |_____^ help: try this: `Pending::<()>.is_pending()`
+ | |_____^ help: try: `Pending::<()>.is_pending()`
error: aborting due to 18 previous errors
diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr
index f6ce666bb..2b1ce9f54 100644
--- a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr
+++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr
@@ -2,7 +2,7 @@ error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_result.rs:16:12
|
LL | if let Ok(_) = &result {}
- | -------^^^^^---------- help: try this: `if result.is_ok()`
+ | -------^^^^^---------- help: try: `if result.is_ok()`
|
= note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`
@@ -10,25 +10,25 @@ error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_result.rs:18:12
|
LL | if let Ok(_) = Ok::<i32, i32>(42) {}
- | -------^^^^^--------------------- help: try this: `if Ok::<i32, i32>(42).is_ok()`
+ | -------^^^^^--------------------- help: try: `if Ok::<i32, i32>(42).is_ok()`
error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_result.rs:20:12
|
LL | if let Err(_) = Err::<i32, i32>(42) {}
- | -------^^^^^^---------------------- help: try this: `if Err::<i32, i32>(42).is_err()`
+ | -------^^^^^^---------------------- help: try: `if Err::<i32, i32>(42).is_err()`
error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_result.rs:22:15
|
LL | while let Ok(_) = Ok::<i32, i32>(10) {}
- | ----------^^^^^--------------------- help: try this: `while Ok::<i32, i32>(10).is_ok()`
+ | ----------^^^^^--------------------- help: try: `while Ok::<i32, i32>(10).is_ok()`
error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_result.rs:24:15
|
LL | while let Err(_) = Ok::<i32, i32>(10) {}
- | ----------^^^^^^--------------------- help: try this: `while Ok::<i32, i32>(10).is_err()`
+ | ----------^^^^^^--------------------- help: try: `while Ok::<i32, i32>(10).is_err()`
error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_result.rs:34:5
@@ -37,7 +37,7 @@ LL | / match Ok::<i32, i32>(42) {
LL | | Ok(_) => true,
LL | | Err(_) => false,
LL | | };
- | |_____^ help: try this: `Ok::<i32, i32>(42).is_ok()`
+ | |_____^ help: try: `Ok::<i32, i32>(42).is_ok()`
error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_result.rs:39:5
@@ -46,7 +46,7 @@ LL | / match Ok::<i32, i32>(42) {
LL | | Ok(_) => false,
LL | | Err(_) => true,
LL | | };
- | |_____^ help: try this: `Ok::<i32, i32>(42).is_err()`
+ | |_____^ help: try: `Ok::<i32, i32>(42).is_err()`
error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_result.rs:44:5
@@ -55,7 +55,7 @@ LL | / match Err::<i32, i32>(42) {
LL | | Ok(_) => false,
LL | | Err(_) => true,
LL | | };
- | |_____^ help: try this: `Err::<i32, i32>(42).is_err()`
+ | |_____^ help: try: `Err::<i32, i32>(42).is_err()`
error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_result.rs:49:5
@@ -64,73 +64,73 @@ LL | / match Err::<i32, i32>(42) {
LL | | Ok(_) => true,
LL | | Err(_) => false,
LL | | };
- | |_____^ help: try this: `Err::<i32, i32>(42).is_ok()`
+ | |_____^ help: try: `Err::<i32, i32>(42).is_ok()`
error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_result.rs:54:20
|
LL | let _ = if let Ok(_) = Ok::<usize, ()>(4) { true } else { false };
- | -------^^^^^--------------------- help: try this: `if Ok::<usize, ()>(4).is_ok()`
+ | -------^^^^^--------------------- help: try: `if Ok::<usize, ()>(4).is_ok()`
error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_result.rs:62:20
|
LL | let _ = if let Ok(_) = gen_res() {
- | -------^^^^^------------ help: try this: `if gen_res().is_ok()`
+ | -------^^^^^------------ help: try: `if gen_res().is_ok()`
error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_result.rs:64:19
|
LL | } else if let Err(_) = gen_res() {
- | -------^^^^^^------------ help: try this: `if gen_res().is_err()`
+ | -------^^^^^^------------ help: try: `if gen_res().is_err()`
error: redundant pattern matching, consider using `is_some()`
--> $DIR/redundant_pattern_matching_result.rs:87:19
|
LL | while let Some(_) = r#try!(result_opt()) {}
- | ----------^^^^^^^----------------------- help: try this: `while r#try!(result_opt()).is_some()`
+ | ----------^^^^^^^----------------------- help: try: `while r#try!(result_opt()).is_some()`
error: redundant pattern matching, consider using `is_some()`
--> $DIR/redundant_pattern_matching_result.rs:88:16
|
LL | if let Some(_) = r#try!(result_opt()) {}
- | -------^^^^^^^----------------------- help: try this: `if r#try!(result_opt()).is_some()`
+ | -------^^^^^^^----------------------- help: try: `if r#try!(result_opt()).is_some()`
error: redundant pattern matching, consider using `is_some()`
--> $DIR/redundant_pattern_matching_result.rs:94:12
|
LL | if let Some(_) = m!() {}
- | -------^^^^^^^------- help: try this: `if m!().is_some()`
+ | -------^^^^^^^------- help: try: `if m!().is_some()`
error: redundant pattern matching, consider using `is_some()`
--> $DIR/redundant_pattern_matching_result.rs:95:15
|
LL | while let Some(_) = m!() {}
- | ----------^^^^^^^------- help: try this: `while m!().is_some()`
+ | ----------^^^^^^^------- help: try: `while m!().is_some()`
error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_result.rs:113:12
|
LL | if let Ok(_) = Ok::<i32, i32>(42) {}
- | -------^^^^^--------------------- help: try this: `if Ok::<i32, i32>(42).is_ok()`
+ | -------^^^^^--------------------- help: try: `if Ok::<i32, i32>(42).is_ok()`
error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_result.rs:115:12
|
LL | if let Err(_) = Err::<i32, i32>(42) {}
- | -------^^^^^^---------------------- help: try this: `if Err::<i32, i32>(42).is_err()`
+ | -------^^^^^^---------------------- help: try: `if Err::<i32, i32>(42).is_err()`
error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_result.rs:117:15
|
LL | while let Ok(_) = Ok::<i32, i32>(10) {}
- | ----------^^^^^--------------------- help: try this: `while Ok::<i32, i32>(10).is_ok()`
+ | ----------^^^^^--------------------- help: try: `while Ok::<i32, i32>(10).is_ok()`
error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_result.rs:119:15
|
LL | while let Err(_) = Ok::<i32, i32>(10) {}
- | ----------^^^^^^--------------------- help: try this: `while Ok::<i32, i32>(10).is_err()`
+ | ----------^^^^^^--------------------- help: try: `while Ok::<i32, i32>(10).is_err()`
error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_result.rs:121:5
@@ -139,7 +139,7 @@ LL | / match Ok::<i32, i32>(42) {
LL | | Ok(_) => true,
LL | | Err(_) => false,
LL | | };
- | |_____^ help: try this: `Ok::<i32, i32>(42).is_ok()`
+ | |_____^ help: try: `Ok::<i32, i32>(42).is_ok()`
error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_result.rs:126:5
@@ -148,7 +148,7 @@ LL | / match Err::<i32, i32>(42) {
LL | | Ok(_) => false,
LL | | Err(_) => true,
LL | | };
- | |_____^ help: try this: `Err::<i32, i32>(42).is_err()`
+ | |_____^ help: try: `Err::<i32, i32>(42).is_err()`
error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_result.rs:136:5
@@ -157,7 +157,7 @@ LL | / match x {
LL | | Ok(_) => true,
LL | | _ => false,
LL | | };
- | |_____^ help: try this: `x.is_ok()`
+ | |_____^ help: try: `x.is_ok()`
error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_result.rs:141:5
@@ -166,7 +166,7 @@ LL | / match x {
LL | | Ok(_) => false,
LL | | _ => true,
LL | | };
- | |_____^ help: try this: `x.is_err()`
+ | |_____^ help: try: `x.is_err()`
error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_result.rs:146:5
@@ -175,7 +175,7 @@ LL | / match x {
LL | | Err(_) => true,
LL | | _ => false,
LL | | };
- | |_____^ help: try this: `x.is_err()`
+ | |_____^ help: try: `x.is_err()`
error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_result.rs:151:5
@@ -184,19 +184,19 @@ LL | / match x {
LL | | Err(_) => false,
LL | | _ => true,
LL | | };
- | |_____^ help: try this: `x.is_ok()`
+ | |_____^ help: try: `x.is_ok()`
error: redundant pattern matching, consider using `is_ok()`
--> $DIR/redundant_pattern_matching_result.rs:172:13
|
LL | let _ = matches!(x, Ok(_));
- | ^^^^^^^^^^^^^^^^^^ help: try this: `x.is_ok()`
+ | ^^^^^^^^^^^^^^^^^^ help: try: `x.is_ok()`
error: redundant pattern matching, consider using `is_err()`
--> $DIR/redundant_pattern_matching_result.rs:174:13
|
LL | let _ = matches!(x, Err(_));
- | ^^^^^^^^^^^^^^^^^^^ help: try this: `x.is_err()`
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `x.is_err()`
error: aborting due to 28 previous errors
diff --git a/src/tools/clippy/tests/ui/ref_binding_to_reference.stderr b/src/tools/clippy/tests/ui/ref_binding_to_reference.stderr
index eb36cd516..016feb103 100644
--- a/src/tools/clippy/tests/ui/ref_binding_to_reference.stderr
+++ b/src/tools/clippy/tests/ui/ref_binding_to_reference.stderr
@@ -5,7 +5,7 @@ LL | Some(ref x) => x,
| ^^^^^
|
= note: `-D clippy::ref-binding-to-reference` implied by `-D warnings`
-help: try this
+help: try
|
LL | Some(x) => &x,
| ~ ~~
@@ -16,7 +16,7 @@ error: this pattern creates a reference to a reference
LL | Some(ref x) => {
| ^^^^^
|
-help: try this
+help: try
|
LL ~ Some(x) => {
LL | f1(x);
@@ -30,7 +30,7 @@ error: this pattern creates a reference to a reference
LL | Some(ref x) => m2!(x),
| ^^^^^
|
-help: try this
+help: try
|
LL | Some(x) => m2!(&x),
| ~ ~~
@@ -41,7 +41,7 @@ error: this pattern creates a reference to a reference
LL | let _ = |&ref x: &&String| {
| ^^^^^
|
-help: try this
+help: try
|
LL ~ let _ = |&x: &&String| {
LL ~ let _: &&String = &x;
@@ -53,7 +53,7 @@ error: this pattern creates a reference to a reference
LL | fn f2<'a>(&ref x: &&'a String) -> &'a String {
| ^^^^^
|
-help: try this
+help: try
|
LL ~ fn f2<'a>(&x: &&'a String) -> &'a String {
LL ~ let _: &&String = &x;
@@ -66,7 +66,7 @@ error: this pattern creates a reference to a reference
LL | fn f(&ref x: &&String) {
| ^^^^^
|
-help: try this
+help: try
|
LL ~ fn f(&x: &&String) {
LL ~ let _: &&String = &x;
@@ -78,7 +78,7 @@ error: this pattern creates a reference to a reference
LL | fn f(&ref x: &&String) {
| ^^^^^
|
-help: try this
+help: try
|
LL ~ fn f(&x: &&String) {
LL ~ let _: &&String = &x;
diff --git a/src/tools/clippy/tests/ui/rename.fixed b/src/tools/clippy/tests/ui/rename.fixed
index cc2295ea5..e78b9e5c9 100644
--- a/src/tools/clippy/tests/ui/rename.fixed
+++ b/src/tools/clippy/tests/ui/rename.fixed
@@ -27,16 +27,18 @@
#![allow(clippy::single_char_add_str)]
#![allow(clippy::module_name_repetitions)]
#![allow(clippy::recursive_format_impl)]
+#![allow(clippy::unwrap_or_default)]
#![allow(clippy::invisible_characters)]
+#![allow(invalid_reference_casting)]
#![allow(suspicious_double_ref_op)]
#![allow(invalid_nan_comparisons)]
-#![allow(invalid_reference_casting)]
#![allow(drop_bounds)]
#![allow(dropping_copy_types)]
#![allow(dropping_references)]
#![allow(for_loops_over_fallibles)]
#![allow(forgetting_copy_types)]
#![allow(forgetting_references)]
+#![allow(useless_ptr_null_checks)]
#![allow(array_into_iter)]
#![allow(invalid_atomic_ordering)]
#![allow(invalid_value)]
@@ -77,6 +79,7 @@
#![warn(clippy::single_char_add_str)]
#![warn(clippy::module_name_repetitions)]
#![warn(clippy::recursive_format_impl)]
+#![warn(clippy::unwrap_or_default)]
#![warn(clippy::invisible_characters)]
#![warn(invalid_reference_casting)]
#![warn(suspicious_double_ref_op)]
@@ -89,6 +92,7 @@
#![warn(for_loops_over_fallibles)]
#![warn(forgetting_copy_types)]
#![warn(forgetting_references)]
+#![warn(useless_ptr_null_checks)]
#![warn(array_into_iter)]
#![warn(invalid_atomic_ordering)]
#![warn(invalid_value)]
diff --git a/src/tools/clippy/tests/ui/rename.rs b/src/tools/clippy/tests/ui/rename.rs
index 399335aff..2e6ef60cb 100644
--- a/src/tools/clippy/tests/ui/rename.rs
+++ b/src/tools/clippy/tests/ui/rename.rs
@@ -27,16 +27,18 @@
#![allow(clippy::single_char_add_str)]
#![allow(clippy::module_name_repetitions)]
#![allow(clippy::recursive_format_impl)]
+#![allow(clippy::unwrap_or_default)]
#![allow(clippy::invisible_characters)]
+#![allow(invalid_reference_casting)]
#![allow(suspicious_double_ref_op)]
#![allow(invalid_nan_comparisons)]
-#![allow(invalid_reference_casting)]
#![allow(drop_bounds)]
#![allow(dropping_copy_types)]
#![allow(dropping_references)]
#![allow(for_loops_over_fallibles)]
#![allow(forgetting_copy_types)]
#![allow(forgetting_references)]
+#![allow(useless_ptr_null_checks)]
#![allow(array_into_iter)]
#![allow(invalid_atomic_ordering)]
#![allow(invalid_value)]
@@ -77,6 +79,7 @@
#![warn(clippy::single_char_push_str)]
#![warn(clippy::stutter)]
#![warn(clippy::to_string_in_display)]
+#![warn(clippy::unwrap_or_else_default)]
#![warn(clippy::zero_width_space)]
#![warn(clippy::cast_ref_to_mut)]
#![warn(clippy::clone_double_ref)]
@@ -89,6 +92,7 @@
#![warn(clippy::for_loops_over_fallibles)]
#![warn(clippy::forget_copy)]
#![warn(clippy::forget_ref)]
+#![warn(clippy::fn_null_check)]
#![warn(clippy::into_iter_on_array)]
#![warn(clippy::invalid_atomic_ordering)]
#![warn(clippy::invalid_ref)]
diff --git a/src/tools/clippy/tests/ui/rename.stderr b/src/tools/clippy/tests/ui/rename.stderr
index 079371330..57e991e56 100644
--- a/src/tools/clippy/tests/ui/rename.stderr
+++ b/src/tools/clippy/tests/ui/rename.stderr
@@ -1,5 +1,5 @@
error: lint `clippy::almost_complete_letter_range` has been renamed to `clippy::almost_complete_range`
- --> $DIR/rename.rs:52:9
+ --> $DIR/rename.rs:54:9
|
LL | #![warn(clippy::almost_complete_letter_range)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::almost_complete_range`
@@ -7,310 +7,322 @@ LL | #![warn(clippy::almost_complete_letter_range)]
= note: `-D renamed-and-removed-lints` implied by `-D warnings`
error: lint `clippy::blacklisted_name` has been renamed to `clippy::disallowed_names`
- --> $DIR/rename.rs:53:9
+ --> $DIR/rename.rs:55:9
|
LL | #![warn(clippy::blacklisted_name)]
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_names`
error: lint `clippy::block_in_if_condition_expr` has been renamed to `clippy::blocks_in_if_conditions`
- --> $DIR/rename.rs:54:9
+ --> $DIR/rename.rs:56:9
|
LL | #![warn(clippy::block_in_if_condition_expr)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_if_conditions`
error: lint `clippy::block_in_if_condition_stmt` has been renamed to `clippy::blocks_in_if_conditions`
- --> $DIR/rename.rs:55:9
+ --> $DIR/rename.rs:57:9
|
LL | #![warn(clippy::block_in_if_condition_stmt)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_if_conditions`
error: lint `clippy::box_vec` has been renamed to `clippy::box_collection`
- --> $DIR/rename.rs:56:9
+ --> $DIR/rename.rs:58:9
|
LL | #![warn(clippy::box_vec)]
| ^^^^^^^^^^^^^^^ help: use the new name: `clippy::box_collection`
error: lint `clippy::const_static_lifetime` has been renamed to `clippy::redundant_static_lifetimes`
- --> $DIR/rename.rs:57:9
+ --> $DIR/rename.rs:59:9
|
LL | #![warn(clippy::const_static_lifetime)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::redundant_static_lifetimes`
error: lint `clippy::cyclomatic_complexity` has been renamed to `clippy::cognitive_complexity`
- --> $DIR/rename.rs:58:9
+ --> $DIR/rename.rs:60:9
|
LL | #![warn(clippy::cyclomatic_complexity)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::cognitive_complexity`
error: lint `clippy::derive_hash_xor_eq` has been renamed to `clippy::derived_hash_with_manual_eq`
- --> $DIR/rename.rs:59:9
+ --> $DIR/rename.rs:61:9
|
LL | #![warn(clippy::derive_hash_xor_eq)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::derived_hash_with_manual_eq`
error: lint `clippy::disallowed_method` has been renamed to `clippy::disallowed_methods`
- --> $DIR/rename.rs:60:9
+ --> $DIR/rename.rs:62:9
|
LL | #![warn(clippy::disallowed_method)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_methods`
error: lint `clippy::disallowed_type` has been renamed to `clippy::disallowed_types`
- --> $DIR/rename.rs:61:9
+ --> $DIR/rename.rs:63:9
|
LL | #![warn(clippy::disallowed_type)]
| ^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_types`
error: lint `clippy::eval_order_dependence` has been renamed to `clippy::mixed_read_write_in_expression`
- --> $DIR/rename.rs:62:9
+ --> $DIR/rename.rs:64:9
|
LL | #![warn(clippy::eval_order_dependence)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::mixed_read_write_in_expression`
error: lint `clippy::identity_conversion` has been renamed to `clippy::useless_conversion`
- --> $DIR/rename.rs:63:9
+ --> $DIR/rename.rs:65:9
|
LL | #![warn(clippy::identity_conversion)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::useless_conversion`
error: lint `clippy::if_let_some_result` has been renamed to `clippy::match_result_ok`
- --> $DIR/rename.rs:64:9
+ --> $DIR/rename.rs:66:9
|
LL | #![warn(clippy::if_let_some_result)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::match_result_ok`
error: lint `clippy::integer_arithmetic` has been renamed to `clippy::arithmetic_side_effects`
- --> $DIR/rename.rs:65:9
+ --> $DIR/rename.rs:67:9
|
LL | #![warn(clippy::integer_arithmetic)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::arithmetic_side_effects`
error: lint `clippy::logic_bug` has been renamed to `clippy::overly_complex_bool_expr`
- --> $DIR/rename.rs:66:9
+ --> $DIR/rename.rs:68:9
|
LL | #![warn(clippy::logic_bug)]
| ^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::overly_complex_bool_expr`
error: lint `clippy::new_without_default_derive` has been renamed to `clippy::new_without_default`
- --> $DIR/rename.rs:67:9
+ --> $DIR/rename.rs:69:9
|
LL | #![warn(clippy::new_without_default_derive)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::new_without_default`
error: lint `clippy::option_and_then_some` has been renamed to `clippy::bind_instead_of_map`
- --> $DIR/rename.rs:68:9
+ --> $DIR/rename.rs:70:9
|
LL | #![warn(clippy::option_and_then_some)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::bind_instead_of_map`
error: lint `clippy::option_expect_used` has been renamed to `clippy::expect_used`
- --> $DIR/rename.rs:69:9
+ --> $DIR/rename.rs:71:9
|
LL | #![warn(clippy::option_expect_used)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::expect_used`
error: lint `clippy::option_map_unwrap_or` has been renamed to `clippy::map_unwrap_or`
- --> $DIR/rename.rs:70:9
+ --> $DIR/rename.rs:72:9
|
LL | #![warn(clippy::option_map_unwrap_or)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or`
error: lint `clippy::option_map_unwrap_or_else` has been renamed to `clippy::map_unwrap_or`
- --> $DIR/rename.rs:71:9
+ --> $DIR/rename.rs:73:9
|
LL | #![warn(clippy::option_map_unwrap_or_else)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or`
error: lint `clippy::option_unwrap_used` has been renamed to `clippy::unwrap_used`
- --> $DIR/rename.rs:72:9
+ --> $DIR/rename.rs:74:9
|
LL | #![warn(clippy::option_unwrap_used)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_used`
error: lint `clippy::ref_in_deref` has been renamed to `clippy::needless_borrow`
- --> $DIR/rename.rs:73:9
+ --> $DIR/rename.rs:75:9
|
LL | #![warn(clippy::ref_in_deref)]
| ^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::needless_borrow`
error: lint `clippy::result_expect_used` has been renamed to `clippy::expect_used`
- --> $DIR/rename.rs:74:9
+ --> $DIR/rename.rs:76:9
|
LL | #![warn(clippy::result_expect_used)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::expect_used`
error: lint `clippy::result_map_unwrap_or_else` has been renamed to `clippy::map_unwrap_or`
- --> $DIR/rename.rs:75:9
+ --> $DIR/rename.rs:77:9
|
LL | #![warn(clippy::result_map_unwrap_or_else)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or`
error: lint `clippy::result_unwrap_used` has been renamed to `clippy::unwrap_used`
- --> $DIR/rename.rs:76:9
+ --> $DIR/rename.rs:78:9
|
LL | #![warn(clippy::result_unwrap_used)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_used`
error: lint `clippy::single_char_push_str` has been renamed to `clippy::single_char_add_str`
- --> $DIR/rename.rs:77:9
+ --> $DIR/rename.rs:79:9
|
LL | #![warn(clippy::single_char_push_str)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::single_char_add_str`
error: lint `clippy::stutter` has been renamed to `clippy::module_name_repetitions`
- --> $DIR/rename.rs:78:9
+ --> $DIR/rename.rs:80:9
|
LL | #![warn(clippy::stutter)]
| ^^^^^^^^^^^^^^^ help: use the new name: `clippy::module_name_repetitions`
error: lint `clippy::to_string_in_display` has been renamed to `clippy::recursive_format_impl`
- --> $DIR/rename.rs:79:9
+ --> $DIR/rename.rs:81:9
|
LL | #![warn(clippy::to_string_in_display)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::recursive_format_impl`
+error: lint `clippy::unwrap_or_else_default` has been renamed to `clippy::unwrap_or_default`
+ --> $DIR/rename.rs:82:9
+ |
+LL | #![warn(clippy::unwrap_or_else_default)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_or_default`
+
error: lint `clippy::zero_width_space` has been renamed to `clippy::invisible_characters`
- --> $DIR/rename.rs:80:9
+ --> $DIR/rename.rs:83:9
|
LL | #![warn(clippy::zero_width_space)]
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::invisible_characters`
error: lint `clippy::cast_ref_to_mut` has been renamed to `invalid_reference_casting`
- --> $DIR/rename.rs:81:9
+ --> $DIR/rename.rs:84:9
|
LL | #![warn(clippy::cast_ref_to_mut)]
| ^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_reference_casting`
error: lint `clippy::clone_double_ref` has been renamed to `suspicious_double_ref_op`
- --> $DIR/rename.rs:82:9
+ --> $DIR/rename.rs:85:9
|
LL | #![warn(clippy::clone_double_ref)]
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `suspicious_double_ref_op`
error: lint `clippy::cmp_nan` has been renamed to `invalid_nan_comparisons`
- --> $DIR/rename.rs:83:9
+ --> $DIR/rename.rs:86:9
|
LL | #![warn(clippy::cmp_nan)]
| ^^^^^^^^^^^^^^^ help: use the new name: `invalid_nan_comparisons`
error: lint `clippy::drop_bounds` has been renamed to `drop_bounds`
- --> $DIR/rename.rs:84:9
+ --> $DIR/rename.rs:87:9
|
LL | #![warn(clippy::drop_bounds)]
| ^^^^^^^^^^^^^^^^^^^ help: use the new name: `drop_bounds`
error: lint `clippy::drop_copy` has been renamed to `dropping_copy_types`
- --> $DIR/rename.rs:85:9
+ --> $DIR/rename.rs:88:9
|
LL | #![warn(clippy::drop_copy)]
| ^^^^^^^^^^^^^^^^^ help: use the new name: `dropping_copy_types`
error: lint `clippy::drop_ref` has been renamed to `dropping_references`
- --> $DIR/rename.rs:86:9
+ --> $DIR/rename.rs:89:9
|
LL | #![warn(clippy::drop_ref)]
| ^^^^^^^^^^^^^^^^ help: use the new name: `dropping_references`
error: lint `clippy::for_loop_over_option` has been renamed to `for_loops_over_fallibles`
- --> $DIR/rename.rs:87:9
+ --> $DIR/rename.rs:90:9
|
LL | #![warn(clippy::for_loop_over_option)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `for_loops_over_fallibles`
error: lint `clippy::for_loop_over_result` has been renamed to `for_loops_over_fallibles`
- --> $DIR/rename.rs:88:9
+ --> $DIR/rename.rs:91:9
|
LL | #![warn(clippy::for_loop_over_result)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `for_loops_over_fallibles`
error: lint `clippy::for_loops_over_fallibles` has been renamed to `for_loops_over_fallibles`
- --> $DIR/rename.rs:89:9
+ --> $DIR/rename.rs:92:9
|
LL | #![warn(clippy::for_loops_over_fallibles)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `for_loops_over_fallibles`
error: lint `clippy::forget_copy` has been renamed to `forgetting_copy_types`
- --> $DIR/rename.rs:90:9
+ --> $DIR/rename.rs:93:9
|
LL | #![warn(clippy::forget_copy)]
| ^^^^^^^^^^^^^^^^^^^ help: use the new name: `forgetting_copy_types`
error: lint `clippy::forget_ref` has been renamed to `forgetting_references`
- --> $DIR/rename.rs:91:9
+ --> $DIR/rename.rs:94:9
|
LL | #![warn(clippy::forget_ref)]
| ^^^^^^^^^^^^^^^^^^ help: use the new name: `forgetting_references`
+error: lint `clippy::fn_null_check` has been renamed to `useless_ptr_null_checks`
+ --> $DIR/rename.rs:95:9
+ |
+LL | #![warn(clippy::fn_null_check)]
+ | ^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `useless_ptr_null_checks`
+
error: lint `clippy::into_iter_on_array` has been renamed to `array_into_iter`
- --> $DIR/rename.rs:92:9
+ --> $DIR/rename.rs:96:9
|
LL | #![warn(clippy::into_iter_on_array)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `array_into_iter`
error: lint `clippy::invalid_atomic_ordering` has been renamed to `invalid_atomic_ordering`
- --> $DIR/rename.rs:93:9
+ --> $DIR/rename.rs:97:9
|
LL | #![warn(clippy::invalid_atomic_ordering)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_atomic_ordering`
error: lint `clippy::invalid_ref` has been renamed to `invalid_value`
- --> $DIR/rename.rs:94:9
+ --> $DIR/rename.rs:98:9
|
LL | #![warn(clippy::invalid_ref)]
| ^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_value`
error: lint `clippy::invalid_utf8_in_unchecked` has been renamed to `invalid_from_utf8_unchecked`
- --> $DIR/rename.rs:95:9
+ --> $DIR/rename.rs:99:9
|
LL | #![warn(clippy::invalid_utf8_in_unchecked)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_from_utf8_unchecked`
error: lint `clippy::let_underscore_drop` has been renamed to `let_underscore_drop`
- --> $DIR/rename.rs:96:9
+ --> $DIR/rename.rs:100:9
|
LL | #![warn(clippy::let_underscore_drop)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `let_underscore_drop`
error: lint `clippy::mem_discriminant_non_enum` has been renamed to `enum_intrinsics_non_enums`
- --> $DIR/rename.rs:97:9
+ --> $DIR/rename.rs:101:9
|
LL | #![warn(clippy::mem_discriminant_non_enum)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `enum_intrinsics_non_enums`
error: lint `clippy::panic_params` has been renamed to `non_fmt_panics`
- --> $DIR/rename.rs:98:9
+ --> $DIR/rename.rs:102:9
|
LL | #![warn(clippy::panic_params)]
| ^^^^^^^^^^^^^^^^^^^^ help: use the new name: `non_fmt_panics`
error: lint `clippy::positional_named_format_parameters` has been renamed to `named_arguments_used_positionally`
- --> $DIR/rename.rs:99:9
+ --> $DIR/rename.rs:103:9
|
LL | #![warn(clippy::positional_named_format_parameters)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `named_arguments_used_positionally`
error: lint `clippy::temporary_cstring_as_ptr` has been renamed to `temporary_cstring_as_ptr`
- --> $DIR/rename.rs:100:9
+ --> $DIR/rename.rs:104:9
|
LL | #![warn(clippy::temporary_cstring_as_ptr)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `temporary_cstring_as_ptr`
error: lint `clippy::undropped_manually_drops` has been renamed to `undropped_manually_drops`
- --> $DIR/rename.rs:101:9
+ --> $DIR/rename.rs:105:9
|
LL | #![warn(clippy::undropped_manually_drops)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `undropped_manually_drops`
error: lint `clippy::unknown_clippy_lints` has been renamed to `unknown_lints`
- --> $DIR/rename.rs:102:9
+ --> $DIR/rename.rs:106:9
|
LL | #![warn(clippy::unknown_clippy_lints)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `unknown_lints`
error: lint `clippy::unused_label` has been renamed to `unused_labels`
- --> $DIR/rename.rs:103:9
+ --> $DIR/rename.rs:107:9
|
LL | #![warn(clippy::unused_label)]
| ^^^^^^^^^^^^^^^^^^^^ help: use the new name: `unused_labels`
-error: aborting due to 52 previous errors
+error: aborting due to 54 previous errors
diff --git a/src/tools/clippy/tests/ui/result_map_or_into_option.fixed b/src/tools/clippy/tests/ui/result_map_or_into_option.fixed
index 119ff2591..6850eeb7a 100644
--- a/src/tools/clippy/tests/ui/result_map_or_into_option.fixed
+++ b/src/tools/clippy/tests/ui/result_map_or_into_option.fixed
@@ -15,5 +15,5 @@ fn main() {
// A non-Some `f` closure where the argument is not used as the
// return should not emit the lint
let opt: Result<u32, &str> = Ok(1);
- opt.map_or(None, |_x| Some(1));
+ _ = opt.map_or(None, |_x| Some(1));
}
diff --git a/src/tools/clippy/tests/ui/result_map_or_into_option.rs b/src/tools/clippy/tests/ui/result_map_or_into_option.rs
index eeeef830a..8e1518144 100644
--- a/src/tools/clippy/tests/ui/result_map_or_into_option.rs
+++ b/src/tools/clippy/tests/ui/result_map_or_into_option.rs
@@ -15,5 +15,5 @@ fn main() {
// A non-Some `f` closure where the argument is not used as the
// return should not emit the lint
let opt: Result<u32, &str> = Ok(1);
- opt.map_or(None, |_x| Some(1));
+ _ = opt.map_or(None, |_x| Some(1));
}
diff --git a/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.stderr b/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.stderr
index 782febd52..ad941fa8b 100644
--- a/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.stderr
+++ b/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.stderr
@@ -4,7 +4,7 @@ error: called `map(f)` on an `Result` value where `f` is a function that returns
LL | x.field.map(do_nothing);
| ^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(x_field) = x.field { do_nothing(x_field) }`
+ | help: try: `if let Ok(x_field) = x.field { do_nothing(x_field) }`
|
= note: `-D clippy::result-map-unit-fn` implied by `-D warnings`
@@ -14,7 +14,7 @@ error: called `map(f)` on an `Result` value where `f` is a function that returns
LL | x.field.map(do_nothing);
| ^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(x_field) = x.field { do_nothing(x_field) }`
+ | help: try: `if let Ok(x_field) = x.field { do_nothing(x_field) }`
error: called `map(f)` on an `Result` value where `f` is a function that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:39:5
@@ -22,7 +22,7 @@ error: called `map(f)` on an `Result` value where `f` is a function that returns
LL | x.field.map(diverge);
| ^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(x_field) = x.field { diverge(x_field) }`
+ | help: try: `if let Ok(x_field) = x.field { diverge(x_field) }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:45:5
@@ -30,7 +30,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| x.do_result_nothing(value + captured));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { x.do_result_nothing(value + captured) }`
+ | help: try: `if let Ok(value) = x.field { x.do_result_nothing(value + captured) }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:47:5
@@ -38,7 +38,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| { x.do_result_plus_one(value + captured); });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { x.do_result_plus_one(value + captured); }`
+ | help: try: `if let Ok(value) = x.field { x.do_result_plus_one(value + captured); }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:50:5
@@ -46,7 +46,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| do_nothing(value + captured));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { do_nothing(value + captured) }`
+ | help: try: `if let Ok(value) = x.field { do_nothing(value + captured) }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:52:5
@@ -54,7 +54,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| { do_nothing(value + captured) });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { do_nothing(value + captured) }`
+ | help: try: `if let Ok(value) = x.field { do_nothing(value + captured) }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:54:5
@@ -62,7 +62,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| { do_nothing(value + captured); });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { do_nothing(value + captured); }`
+ | help: try: `if let Ok(value) = x.field { do_nothing(value + captured); }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:56:5
@@ -70,7 +70,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| { { do_nothing(value + captured); } });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { do_nothing(value + captured); }`
+ | help: try: `if let Ok(value) = x.field { do_nothing(value + captured); }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:59:5
@@ -78,7 +78,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| diverge(value + captured));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { diverge(value + captured) }`
+ | help: try: `if let Ok(value) = x.field { diverge(value + captured) }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:61:5
@@ -86,7 +86,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| { diverge(value + captured) });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { diverge(value + captured) }`
+ | help: try: `if let Ok(value) = x.field { diverge(value + captured) }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:63:5
@@ -94,7 +94,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| { diverge(value + captured); });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { diverge(value + captured); }`
+ | help: try: `if let Ok(value) = x.field { diverge(value + captured); }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:65:5
@@ -102,7 +102,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| { { diverge(value + captured); } });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { diverge(value + captured); }`
+ | help: try: `if let Ok(value) = x.field { diverge(value + captured); }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:70:5
@@ -110,7 +110,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| { let y = plus_one(value + captured); });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { let y = plus_one(value + captured); }`
+ | help: try: `if let Ok(value) = x.field { let y = plus_one(value + captured); }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:72:5
@@ -118,7 +118,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| { plus_one(value + captured); });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { plus_one(value + captured); }`
+ | help: try: `if let Ok(value) = x.field { plus_one(value + captured); }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:74:5
@@ -126,7 +126,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| { { plus_one(value + captured); } });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { plus_one(value + captured); }`
+ | help: try: `if let Ok(value) = x.field { plus_one(value + captured); }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:77:5
@@ -134,7 +134,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|ref value| { do_nothing(value + captured) });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(ref value) = x.field { do_nothing(value + captured) }`
+ | help: try: `if let Ok(ref value) = x.field { do_nothing(value + captured) }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_fixable.rs:79:5
@@ -142,7 +142,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| println!("{:?}", value));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { println!("{:?}", value) }`
+ | help: try: `if let Ok(value) = x.field { println!("{:?}", value) }`
error: aborting due to 18 previous errors
diff --git a/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr b/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr
index d0e534f63..75ec1ba80 100644
--- a/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr
+++ b/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr
@@ -4,7 +4,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| { do_nothing(value); do_nothing(value) });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { ... }`
+ | help: try: `if let Ok(value) = x.field { ... }`
|
= note: `-D clippy::result-map-unit-fn` implied by `-D warnings`
@@ -14,7 +14,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| if value > 0 { do_nothing(value); do_nothing(value) });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { ... }`
+ | help: try: `if let Ok(value) = x.field { ... }`
error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()`
--> $DIR/result_map_unit_fn_unfixable.rs:29:5
@@ -23,7 +23,7 @@ LL | // x.field.map(|value| {
LL | || do_nothing(value);
LL | || do_nothing(value)
LL | || });
- | ||______^- help: try this: `if let Ok(value) = x.field { ... }`
+ | ||______^- help: try: `if let Ok(value) = x.field { ... }`
| |______|
|
@@ -33,7 +33,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns
LL | x.field.map(|value| { do_nothing(value); do_nothing(value); });
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(value) = x.field { ... }`
+ | help: try: `if let Ok(value) = x.field { ... }`
error: called `map(f)` on an `Result` value where `f` is a function that returns the unit type `()`
--> $DIR/result_map_unit_fn_unfixable.rs:37:5
@@ -41,7 +41,7 @@ error: called `map(f)` on an `Result` value where `f` is a function that returns
LL | "12".parse::<i32>().map(diverge);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(a) = "12".parse::<i32>() { diverge(a) }`
+ | help: try: `if let Ok(a) = "12".parse::<i32>() { diverge(a) }`
error: called `map(f)` on an `Result` value where `f` is a function that returns the unit type `()`
--> $DIR/result_map_unit_fn_unfixable.rs:43:5
@@ -49,7 +49,7 @@ error: called `map(f)` on an `Result` value where `f` is a function that returns
LL | y.map(do_nothing);
| ^^^^^^^^^^^^^^^^^-
| |
- | help: try this: `if let Ok(_y) = y { do_nothing(_y) }`
+ | help: try: `if let Ok(_y) = y { do_nothing(_y) }`
error: aborting due to 6 previous errors
diff --git a/src/tools/clippy/tests/ui/self_assignment.rs b/src/tools/clippy/tests/ui/self_assignment.rs
index ec3ae1209..a7f9fbaae 100644
--- a/src/tools/clippy/tests/ui/self_assignment.rs
+++ b/src/tools/clippy/tests/ui/self_assignment.rs
@@ -1,5 +1,5 @@
#![warn(clippy::self_assignment)]
-#![allow(clippy::useless_vec)]
+#![allow(clippy::useless_vec, clippy::needless_pass_by_ref_mut)]
pub struct S<'a> {
a: i32,
@@ -14,7 +14,7 @@ pub fn positives(mut a: usize, b: &mut u32, mut s: S) {
*b = *b;
s = s;
s.a = s.a;
- s.b[10] = s.b[5 + 5];
+ s.b[9] = s.b[5 + 4];
s.c[0][1] = s.c[0][1];
s.b[a] = s.b[a];
*s.e = *s.e;
diff --git a/src/tools/clippy/tests/ui/self_assignment.stderr b/src/tools/clippy/tests/ui/self_assignment.stderr
index bed88244e..25b8569fa 100644
--- a/src/tools/clippy/tests/ui/self_assignment.stderr
+++ b/src/tools/clippy/tests/ui/self_assignment.stderr
@@ -24,11 +24,11 @@ error: self-assignment of `s.a` to `s.a`
LL | s.a = s.a;
| ^^^^^^^^^
-error: self-assignment of `s.b[5 + 5]` to `s.b[10]`
+error: self-assignment of `s.b[5 + 4]` to `s.b[9]`
--> $DIR/self_assignment.rs:17:5
|
-LL | s.b[10] = s.b[5 + 5];
- | ^^^^^^^^^^^^^^^^^^^^
+LL | s.b[9] = s.b[5 + 4];
+ | ^^^^^^^^^^^^^^^^^^^
error: self-assignment of `s.c[0][1]` to `s.c[0][1]`
--> $DIR/self_assignment.rs:18:5
diff --git a/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.fixed b/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.fixed
new file mode 100644
index 000000000..653f4533b
--- /dev/null
+++ b/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.fixed
@@ -0,0 +1,123 @@
+//@run-rustfix
+#![warn(clippy::semicolon_if_nothing_returned)]
+#![allow(clippy::redundant_closure, clippy::uninlined_format_args, clippy::needless_late_init)]
+
+fn get_unit() {}
+
+// the functions below trigger the lint
+fn main() {
+ println!("Hello");
+}
+
+fn hello() {
+ get_unit();
+}
+
+fn basic101(x: i32) {
+ let y: i32;
+ y = x + 1;
+}
+
+#[rustfmt::skip]
+fn closure_error() {
+ let _d = || {
+ hello();
+ };
+}
+
+#[rustfmt::skip]
+fn unsafe_checks_error() {
+ use std::mem::MaybeUninit;
+ use std::ptr;
+
+ let mut s = MaybeUninit::<String>::uninit();
+ let _d = || unsafe {
+ ptr::drop_in_place(s.as_mut_ptr());
+ };
+}
+
+// this is fine
+fn print_sum(a: i32, b: i32) {
+ println!("{}", a + b);
+ assert_eq!(true, false);
+}
+
+fn foo(x: i32) {
+ let y: i32;
+ if x < 1 {
+ y = 4;
+ } else {
+ y = 5;
+ }
+}
+
+fn bar(x: i32) {
+ let y: i32;
+ match x {
+ 1 => y = 4,
+ _ => y = 32,
+ }
+}
+
+fn foobar(x: i32) {
+ let y: i32;
+ 'label: {
+ y = x + 1;
+ }
+}
+
+fn loop_test(x: i32) {
+ let y: i32;
+ for &ext in &["stdout", "stderr", "fixed"] {
+ println!("{}", ext);
+ }
+}
+
+fn closure() {
+ let _d = || hello();
+}
+
+#[rustfmt::skip]
+fn closure_block() {
+ let _d = || { hello() };
+}
+
+unsafe fn some_unsafe_op() {}
+unsafe fn some_other_unsafe_fn() {}
+
+fn do_something() {
+ unsafe { some_unsafe_op() };
+
+ unsafe { some_other_unsafe_fn() };
+}
+
+fn unsafe_checks() {
+ use std::mem::MaybeUninit;
+ use std::ptr;
+
+ let mut s = MaybeUninit::<String>::uninit();
+ let _d = || unsafe { ptr::drop_in_place(s.as_mut_ptr()) };
+}
+
+// Issue #7768
+#[rustfmt::skip]
+fn macro_with_semicolon() {
+ macro_rules! repro {
+ () => {
+ while false {
+ }
+ };
+ }
+ repro!();
+}
+
+fn function_returning_option() -> Option<i32> {
+ Some(1)
+}
+
+// No warning
+fn let_else_stmts() {
+ let Some(x) = function_returning_option() else {
+ return;
+ };
+}
diff --git a/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.rs b/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.rs
index 4ab7dbab5..9db038219 100644
--- a/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.rs
+++ b/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.rs
@@ -1,5 +1,6 @@
+//@run-rustfix
#![warn(clippy::semicolon_if_nothing_returned)]
-#![allow(clippy::redundant_closure, clippy::uninlined_format_args)]
+#![allow(clippy::redundant_closure, clippy::uninlined_format_args, clippy::needless_late_init)]
fn get_unit() {}
@@ -116,5 +117,7 @@ fn function_returning_option() -> Option<i32> {
// No warning
fn let_else_stmts() {
- let Some(x) = function_returning_option() else { return; };
+ let Some(x) = function_returning_option() else {
+ return;
+ };
}
diff --git a/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.stderr b/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.stderr
index 8d9a67585..78813e7cc 100644
--- a/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.stderr
+++ b/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.stderr
@@ -1,5 +1,5 @@
error: consider adding a `;` to the last statement for consistent formatting
- --> $DIR/semicolon_if_nothing_returned.rs:8:5
+ --> $DIR/semicolon_if_nothing_returned.rs:9:5
|
LL | println!("Hello")
| ^^^^^^^^^^^^^^^^^ help: add a `;` here: `println!("Hello");`
@@ -7,25 +7,25 @@ LL | println!("Hello")
= note: `-D clippy::semicolon-if-nothing-returned` implied by `-D warnings`
error: consider adding a `;` to the last statement for consistent formatting
- --> $DIR/semicolon_if_nothing_returned.rs:12:5
+ --> $DIR/semicolon_if_nothing_returned.rs:13:5
|
LL | get_unit()
| ^^^^^^^^^^ help: add a `;` here: `get_unit();`
error: consider adding a `;` to the last statement for consistent formatting
- --> $DIR/semicolon_if_nothing_returned.rs:17:5
+ --> $DIR/semicolon_if_nothing_returned.rs:18:5
|
LL | y = x + 1
| ^^^^^^^^^ help: add a `;` here: `y = x + 1;`
error: consider adding a `;` to the last statement for consistent formatting
- --> $DIR/semicolon_if_nothing_returned.rs:23:9
+ --> $DIR/semicolon_if_nothing_returned.rs:24:9
|
LL | hello()
| ^^^^^^^ help: add a `;` here: `hello();`
error: consider adding a `;` to the last statement for consistent formatting
- --> $DIR/semicolon_if_nothing_returned.rs:34:9
+ --> $DIR/semicolon_if_nothing_returned.rs:35:9
|
LL | ptr::drop_in_place(s.as_mut_ptr())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add a `;` here: `ptr::drop_in_place(s.as_mut_ptr());`
diff --git a/src/tools/clippy/tests/ui/shadow.rs b/src/tools/clippy/tests/ui/shadow.rs
index 9be8c5e59..1b40a43d0 100644
--- a/src/tools/clippy/tests/ui/shadow.rs
+++ b/src/tools/clippy/tests/ui/shadow.rs
@@ -1,7 +1,12 @@
//@aux-build:proc_macro_derive.rs:proc-macro
#![warn(clippy::shadow_same, clippy::shadow_reuse, clippy::shadow_unrelated)]
-#![allow(clippy::let_unit_value, clippy::needless_if)]
+#![allow(
+ clippy::let_unit_value,
+ clippy::needless_if,
+ clippy::redundant_guards,
+ clippy::redundant_locals
+)]
extern crate proc_macro_derive;
diff --git a/src/tools/clippy/tests/ui/shadow.stderr b/src/tools/clippy/tests/ui/shadow.stderr
index 8321f6df2..88b02f53b 100644
--- a/src/tools/clippy/tests/ui/shadow.stderr
+++ b/src/tools/clippy/tests/ui/shadow.stderr
@@ -1,278 +1,278 @@
error: `x` is shadowed by itself in `x`
- --> $DIR/shadow.rs:19:9
+ --> $DIR/shadow.rs:24:9
|
LL | let x = x;
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:18:9
+ --> $DIR/shadow.rs:23:9
|
LL | let x = 1;
| ^
= note: `-D clippy::shadow-same` implied by `-D warnings`
error: `mut x` is shadowed by itself in `&x`
- --> $DIR/shadow.rs:20:13
+ --> $DIR/shadow.rs:25:13
|
LL | let mut x = &x;
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:19:9
+ --> $DIR/shadow.rs:24:9
|
LL | let x = x;
| ^
error: `x` is shadowed by itself in `&mut x`
- --> $DIR/shadow.rs:21:9
+ --> $DIR/shadow.rs:26:9
|
LL | let x = &mut x;
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:20:9
+ --> $DIR/shadow.rs:25:9
|
LL | let mut x = &x;
| ^^^^^
error: `x` is shadowed by itself in `*x`
- --> $DIR/shadow.rs:22:9
+ --> $DIR/shadow.rs:27:9
|
LL | let x = *x;
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:21:9
+ --> $DIR/shadow.rs:26:9
|
LL | let x = &mut x;
| ^
error: `x` is shadowed
- --> $DIR/shadow.rs:27:9
+ --> $DIR/shadow.rs:32:9
|
LL | let x = x.0;
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:26:9
+ --> $DIR/shadow.rs:31:9
|
LL | let x = ([[0]], ());
| ^
= note: `-D clippy::shadow-reuse` implied by `-D warnings`
error: `x` is shadowed
- --> $DIR/shadow.rs:28:9
+ --> $DIR/shadow.rs:33:9
|
LL | let x = x[0];
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:27:9
+ --> $DIR/shadow.rs:32:9
|
LL | let x = x.0;
| ^
error: `x` is shadowed
- --> $DIR/shadow.rs:29:10
+ --> $DIR/shadow.rs:34:10
|
LL | let [x] = x;
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:28:9
+ --> $DIR/shadow.rs:33:9
|
LL | let x = x[0];
| ^
error: `x` is shadowed
- --> $DIR/shadow.rs:30:9
+ --> $DIR/shadow.rs:35:9
|
LL | let x = Some(x);
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:29:10
+ --> $DIR/shadow.rs:34:10
|
LL | let [x] = x;
| ^
error: `x` is shadowed
- --> $DIR/shadow.rs:31:9
+ --> $DIR/shadow.rs:36:9
|
LL | let x = foo(x);
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:30:9
+ --> $DIR/shadow.rs:35:9
|
LL | let x = Some(x);
| ^
error: `x` is shadowed
- --> $DIR/shadow.rs:32:9
+ --> $DIR/shadow.rs:37:9
|
LL | let x = || x;
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:31:9
+ --> $DIR/shadow.rs:36:9
|
LL | let x = foo(x);
| ^
error: `x` is shadowed
- --> $DIR/shadow.rs:33:9
+ --> $DIR/shadow.rs:38:9
|
LL | let x = Some(1).map(|_| x)?;
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:32:9
+ --> $DIR/shadow.rs:37:9
|
LL | let x = || x;
| ^
error: `y` is shadowed
- --> $DIR/shadow.rs:35:9
+ --> $DIR/shadow.rs:40:9
|
LL | let y = match y {
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:34:9
+ --> $DIR/shadow.rs:39:9
|
LL | let y = 1;
| ^
error: `x` shadows a previous, unrelated binding
- --> $DIR/shadow.rs:50:9
+ --> $DIR/shadow.rs:55:9
|
LL | let x = 2;
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:49:9
+ --> $DIR/shadow.rs:54:9
|
LL | let x = 1;
| ^
= note: `-D clippy::shadow-unrelated` implied by `-D warnings`
error: `x` shadows a previous, unrelated binding
- --> $DIR/shadow.rs:55:13
+ --> $DIR/shadow.rs:60:13
|
LL | let x = 1;
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:54:10
+ --> $DIR/shadow.rs:59:10
|
LL | fn f(x: u32) {
| ^
error: `x` shadows a previous, unrelated binding
- --> $DIR/shadow.rs:60:14
+ --> $DIR/shadow.rs:65:14
|
LL | Some(x) => {
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:57:9
+ --> $DIR/shadow.rs:62:9
|
LL | let x = 1;
| ^
error: `x` shadows a previous, unrelated binding
- --> $DIR/shadow.rs:61:17
+ --> $DIR/shadow.rs:66:17
|
LL | let x = 1;
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:60:14
+ --> $DIR/shadow.rs:65:14
|
LL | Some(x) => {
| ^
error: `x` shadows a previous, unrelated binding
- --> $DIR/shadow.rs:65:17
+ --> $DIR/shadow.rs:70:17
|
LL | if let Some(x) = Some(1) {}
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:57:9
+ --> $DIR/shadow.rs:62:9
|
LL | let x = 1;
| ^
error: `x` shadows a previous, unrelated binding
- --> $DIR/shadow.rs:66:20
+ --> $DIR/shadow.rs:71:20
|
LL | while let Some(x) = Some(1) {}
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:57:9
+ --> $DIR/shadow.rs:62:9
|
LL | let x = 1;
| ^
error: `x` shadows a previous, unrelated binding
- --> $DIR/shadow.rs:67:15
+ --> $DIR/shadow.rs:72:15
|
LL | let _ = |[x]: [u32; 1]| {
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:57:9
+ --> $DIR/shadow.rs:62:9
|
LL | let x = 1;
| ^
error: `x` shadows a previous, unrelated binding
- --> $DIR/shadow.rs:68:13
+ --> $DIR/shadow.rs:73:13
|
LL | let x = 1;
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:67:15
+ --> $DIR/shadow.rs:72:15
|
LL | let _ = |[x]: [u32; 1]| {
| ^
error: `y` is shadowed
- --> $DIR/shadow.rs:71:17
+ --> $DIR/shadow.rs:76:17
|
LL | if let Some(y) = y {}
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:70:9
+ --> $DIR/shadow.rs:75:9
|
LL | let y = Some(1);
| ^
error: `_b` shadows a previous, unrelated binding
- --> $DIR/shadow.rs:107:9
+ --> $DIR/shadow.rs:112:9
|
LL | let _b = _a;
| ^^
|
note: previous binding is here
- --> $DIR/shadow.rs:106:28
+ --> $DIR/shadow.rs:111:28
|
LL | pub async fn foo2(_a: i32, _b: i64) {
| ^^
error: `x` shadows a previous, unrelated binding
- --> $DIR/shadow.rs:113:21
+ --> $DIR/shadow.rs:118:21
|
LL | if let Some(x) = Some(1) { x } else { 1 }
| ^
|
note: previous binding is here
- --> $DIR/shadow.rs:112:13
+ --> $DIR/shadow.rs:117:13
|
LL | let x = 1;
| ^
diff --git a/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.rs b/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.rs
index 8c48b21f1..17df9f88f 100644
--- a/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.rs
+++ b/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.rs
@@ -7,8 +7,7 @@
use std::num::ParseIntError;
use std::ops::Deref;
use std::sync::atomic::{AtomicU64, Ordering};
-use std::sync::RwLock;
-use std::sync::{Mutex, MutexGuard};
+use std::sync::{Mutex, MutexGuard, RwLock};
struct State {}
diff --git a/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.stderr b/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.stderr
index 75063a8c9..b56ace200 100644
--- a/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.stderr
+++ b/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.stderr
@@ -1,5 +1,5 @@
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:56:11
+ --> $DIR/significant_drop_in_scrutinee.rs:55:11
|
LL | match mutex.lock().unwrap().foo() {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -19,7 +19,7 @@ LL ~ match value {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:142:11
+ --> $DIR/significant_drop_in_scrutinee.rs:141:11
|
LL | match s.lock_m().get_the_value() {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -38,7 +38,7 @@ LL ~ match value {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:163:11
+ --> $DIR/significant_drop_in_scrutinee.rs:162:11
|
LL | match s.lock_m_m().get_the_value() {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -57,7 +57,7 @@ LL ~ match value {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:211:11
+ --> $DIR/significant_drop_in_scrutinee.rs:210:11
|
LL | match counter.temp_increment().len() {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -73,7 +73,7 @@ LL ~ match value {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:234:16
+ --> $DIR/significant_drop_in_scrutinee.rs:233:16
|
LL | match (mutex1.lock().unwrap().s.len(), true) {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -92,7 +92,7 @@ LL ~ match (value, true) {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:243:22
+ --> $DIR/significant_drop_in_scrutinee.rs:242:22
|
LL | match (true, mutex1.lock().unwrap().s.len(), true) {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -111,7 +111,7 @@ LL ~ match (true, value, true) {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:253:16
+ --> $DIR/significant_drop_in_scrutinee.rs:252:16
|
LL | match (mutex1.lock().unwrap().s.len(), true, mutex2.lock().unwrap().s.len()) {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -132,7 +132,7 @@ LL ~ match (value, true, mutex2.lock().unwrap().s.len()) {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:253:54
+ --> $DIR/significant_drop_in_scrutinee.rs:252:54
|
LL | match (mutex1.lock().unwrap().s.len(), true, mutex2.lock().unwrap().s.len()) {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -153,7 +153,7 @@ LL ~ match (mutex1.lock().unwrap().s.len(), true, value) {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:264:15
+ --> $DIR/significant_drop_in_scrutinee.rs:263:15
|
LL | match mutex3.lock().unwrap().s.as_str() {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -169,7 +169,7 @@ LL | };
= note: this might lead to deadlocks or other unexpected behavior
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:274:22
+ --> $DIR/significant_drop_in_scrutinee.rs:273:22
|
LL | match (true, mutex3.lock().unwrap().s.as_str()) {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -185,7 +185,7 @@ LL | };
= note: this might lead to deadlocks or other unexpected behavior
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:293:11
+ --> $DIR/significant_drop_in_scrutinee.rs:292:11
|
LL | match mutex.lock().unwrap().s.len() > 1 {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -204,7 +204,7 @@ LL ~ match value {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:300:11
+ --> $DIR/significant_drop_in_scrutinee.rs:299:11
|
LL | match 1 < mutex.lock().unwrap().s.len() {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -223,7 +223,7 @@ LL ~ match value {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:318:11
+ --> $DIR/significant_drop_in_scrutinee.rs:317:11
|
LL | match mutex1.lock().unwrap().s.len() < mutex2.lock().unwrap().s.len() {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -244,7 +244,7 @@ LL ~ match value {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:329:11
+ --> $DIR/significant_drop_in_scrutinee.rs:328:11
|
LL | match mutex1.lock().unwrap().s.len() >= mutex2.lock().unwrap().s.len() {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -265,7 +265,7 @@ LL ~ match value {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:364:11
+ --> $DIR/significant_drop_in_scrutinee.rs:363:11
|
LL | match get_mutex_guard().s.len() > 1 {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -284,7 +284,7 @@ LL ~ match value {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:381:11
+ --> $DIR/significant_drop_in_scrutinee.rs:380:11
|
LL | match match i {
| ___________^
@@ -316,7 +316,7 @@ LL ~ match value
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:407:11
+ --> $DIR/significant_drop_in_scrutinee.rs:406:11
|
LL | match if i > 1 {
| ___________^
@@ -349,7 +349,7 @@ LL ~ match value
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:461:11
+ --> $DIR/significant_drop_in_scrutinee.rs:460:11
|
LL | match s.lock().deref().deref() {
| ^^^^^^^^^^^^^^^^^^^^^^^^
@@ -367,7 +367,7 @@ LL ~ match value {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:489:11
+ --> $DIR/significant_drop_in_scrutinee.rs:488:11
|
LL | match s.lock().deref().deref() {
| ^^^^^^^^^^^^^^^^^^^^^^^^
@@ -380,7 +380,7 @@ LL | };
= note: this might lead to deadlocks or other unexpected behavior
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:508:11
+ --> $DIR/significant_drop_in_scrutinee.rs:507:11
|
LL | match mutex.lock().unwrap().i = i {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -399,7 +399,7 @@ LL ~ match () {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:514:11
+ --> $DIR/significant_drop_in_scrutinee.rs:513:11
|
LL | match i = mutex.lock().unwrap().i {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -418,7 +418,7 @@ LL ~ match () {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:520:11
+ --> $DIR/significant_drop_in_scrutinee.rs:519:11
|
LL | match mutex.lock().unwrap().i += 1 {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -437,7 +437,7 @@ LL ~ match () {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:526:11
+ --> $DIR/significant_drop_in_scrutinee.rs:525:11
|
LL | match i += mutex.lock().unwrap().i {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -456,7 +456,7 @@ LL ~ match () {
|
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:589:11
+ --> $DIR/significant_drop_in_scrutinee.rs:588:11
|
LL | match rwlock.read().unwrap().to_number() {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -467,7 +467,7 @@ LL | };
= note: this might lead to deadlocks or other unexpected behavior
error: temporary with significant `Drop` in `for` loop condition will live until the end of the `for` expression
- --> $DIR/significant_drop_in_scrutinee.rs:599:14
+ --> $DIR/significant_drop_in_scrutinee.rs:598:14
|
LL | for s in rwlock.read().unwrap().iter() {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -478,7 +478,7 @@ LL | }
= note: this might lead to deadlocks or other unexpected behavior
error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression
- --> $DIR/significant_drop_in_scrutinee.rs:614:11
+ --> $DIR/significant_drop_in_scrutinee.rs:613:11
|
LL | match mutex.lock().unwrap().foo() {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/significant_drop_tightening.fixed b/src/tools/clippy/tests/ui/significant_drop_tightening.fixed
index 7b848ead7..8065e9e5f 100644
--- a/src/tools/clippy/tests/ui/significant_drop_tightening.fixed
+++ b/src/tools/clippy/tests/ui/significant_drop_tightening.fixed
@@ -28,6 +28,56 @@ pub fn issue_10413() {
}
}
+pub fn issue_11128() {
+ use std::mem::drop as unlock;
+
+ struct Foo {
+ droppable: Option<Vec<i32>>,
+ mutex: Mutex<Vec<i32>>,
+ }
+
+ impl Drop for Foo {
+ fn drop(&mut self) {
+ if let Some(droppable) = self.droppable.take() {
+ let lock = self.mutex.lock().unwrap();
+ let idx_opt = lock.iter().copied().find(|el| Some(el) == droppable.first());
+ if let Some(idx) = idx_opt {
+ let local_droppable = vec![lock.first().copied().unwrap_or_default()];
+ unlock(lock);
+ drop(local_droppable);
+ }
+ }
+ }
+ }
+}
+
+pub fn issue_11160() -> bool {
+ let mutex = Mutex::new(1i32);
+ let lock = mutex.lock().unwrap();
+ let _ = lock.abs();
+ true
+}
+
+pub fn issue_11189() {
+ struct Number {
+ pub value: u32,
+ }
+
+ fn do_something() -> Result<(), ()> {
+ let number = Mutex::new(Number { value: 1 });
+ let number2 = Mutex::new(Number { value: 2 });
+ let number3 = Mutex::new(Number { value: 3 });
+ let mut lock = number.lock().unwrap();
+ let mut lock2 = number2.lock().unwrap();
+ let mut lock3 = number3.lock().unwrap();
+ lock.value += 1;
+ lock2.value += 1;
+ lock3.value += 1;
+ drop((lock, lock2, lock3));
+ Ok(())
+ }
+}
+
pub fn path_return_can_be_ignored() -> i32 {
let mutex = Mutex::new(1);
let lock = mutex.lock().unwrap();
diff --git a/src/tools/clippy/tests/ui/significant_drop_tightening.rs b/src/tools/clippy/tests/ui/significant_drop_tightening.rs
index 36f77cf1b..1620b7684 100644
--- a/src/tools/clippy/tests/ui/significant_drop_tightening.rs
+++ b/src/tools/clippy/tests/ui/significant_drop_tightening.rs
@@ -27,6 +27,56 @@ pub fn issue_10413() {
}
}
+pub fn issue_11128() {
+ use std::mem::drop as unlock;
+
+ struct Foo {
+ droppable: Option<Vec<i32>>,
+ mutex: Mutex<Vec<i32>>,
+ }
+
+ impl Drop for Foo {
+ fn drop(&mut self) {
+ if let Some(droppable) = self.droppable.take() {
+ let lock = self.mutex.lock().unwrap();
+ let idx_opt = lock.iter().copied().find(|el| Some(el) == droppable.first());
+ if let Some(idx) = idx_opt {
+ let local_droppable = vec![lock.first().copied().unwrap_or_default()];
+ unlock(lock);
+ drop(local_droppable);
+ }
+ }
+ }
+ }
+}
+
+pub fn issue_11160() -> bool {
+ let mutex = Mutex::new(1i32);
+ let lock = mutex.lock().unwrap();
+ let _ = lock.abs();
+ true
+}
+
+pub fn issue_11189() {
+ struct Number {
+ pub value: u32,
+ }
+
+ fn do_something() -> Result<(), ()> {
+ let number = Mutex::new(Number { value: 1 });
+ let number2 = Mutex::new(Number { value: 2 });
+ let number3 = Mutex::new(Number { value: 3 });
+ let mut lock = number.lock().unwrap();
+ let mut lock2 = number2.lock().unwrap();
+ let mut lock3 = number3.lock().unwrap();
+ lock.value += 1;
+ lock2.value += 1;
+ lock3.value += 1;
+ drop((lock, lock2, lock3));
+ Ok(())
+ }
+}
+
pub fn path_return_can_be_ignored() -> i32 {
let mutex = Mutex::new(1);
let lock = mutex.lock().unwrap();
diff --git a/src/tools/clippy/tests/ui/significant_drop_tightening.stderr b/src/tools/clippy/tests/ui/significant_drop_tightening.stderr
index 3bdac0b0a..b5cad88ad 100644
--- a/src/tools/clippy/tests/ui/significant_drop_tightening.stderr
+++ b/src/tools/clippy/tests/ui/significant_drop_tightening.stderr
@@ -23,7 +23,7 @@ LL + drop(lock);
|
error: temporary with significant `Drop` can be early dropped
- --> $DIR/significant_drop_tightening.rs:56:13
+ --> $DIR/significant_drop_tightening.rs:106:13
|
LL | / {
LL | | let mutex = Mutex::new(1i32);
@@ -43,7 +43,7 @@ LL + drop(lock);
|
error: temporary with significant `Drop` can be early dropped
- --> $DIR/significant_drop_tightening.rs:77:13
+ --> $DIR/significant_drop_tightening.rs:127:13
|
LL | / {
LL | | let mutex = Mutex::new(1i32);
@@ -67,7 +67,7 @@ LL +
|
error: temporary with significant `Drop` can be early dropped
- --> $DIR/significant_drop_tightening.rs:83:17
+ --> $DIR/significant_drop_tightening.rs:133:17
|
LL | / {
LL | | let mutex = Mutex::new(vec![1i32]);
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports.fixed b/src/tools/clippy/tests/ui/single_component_path_imports.fixed
index d4d2cbbe5..b6b6b0288 100644
--- a/src/tools/clippy/tests/ui/single_component_path_imports.fixed
+++ b/src/tools/clippy/tests/ui/single_component_path_imports.fixed
@@ -4,8 +4,12 @@
use core;
+
+
use serde as edres;
+
pub use serde;
+
use std;
macro_rules! m {
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports.rs b/src/tools/clippy/tests/ui/single_component_path_imports.rs
index 80d72115f..a8c4d8990 100644
--- a/src/tools/clippy/tests/ui/single_component_path_imports.rs
+++ b/src/tools/clippy/tests/ui/single_component_path_imports.rs
@@ -3,9 +3,13 @@
#![allow(unused_imports)]
use core;
+
use regex;
+
use serde as edres;
+
pub use serde;
+
use std;
macro_rules! m {
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports.stderr b/src/tools/clippy/tests/ui/single_component_path_imports.stderr
index d69a86470..853a2fe0e 100644
--- a/src/tools/clippy/tests/ui/single_component_path_imports.stderr
+++ b/src/tools/clippy/tests/ui/single_component_path_imports.stderr
@@ -1,5 +1,5 @@
error: this import is redundant
- --> $DIR/single_component_path_imports.rs:6:1
+ --> $DIR/single_component_path_imports.rs:7:1
|
LL | use regex;
| ^^^^^^^^^^ help: remove it entirely
@@ -7,7 +7,7 @@ LL | use regex;
= note: `-D clippy::single-component-path-imports` implied by `-D warnings`
error: this import is redundant
- --> $DIR/single_component_path_imports.rs:29:5
+ --> $DIR/single_component_path_imports.rs:33:5
|
LL | use regex;
| ^^^^^^^^^^ help: remove it entirely
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.rs b/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.rs
index c75beb747..d6243c19f 100644
--- a/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.rs
+++ b/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.rs
@@ -2,7 +2,9 @@
#![allow(unused_imports)]
use regex;
+
use serde as edres;
+
pub use serde;
fn main() {
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.stderr b/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.stderr
index 330f28520..ff148355e 100644
--- a/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.stderr
+++ b/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.stderr
@@ -7,7 +7,7 @@ LL | use regex;
= note: `-D clippy::single-component-path-imports` implied by `-D warnings`
error: this import is redundant
- --> $DIR/single_component_path_imports_nested_first.rs:13:10
+ --> $DIR/single_component_path_imports_nested_first.rs:15:10
|
LL | use {regex, serde};
| ^^^^^
@@ -15,7 +15,7 @@ LL | use {regex, serde};
= help: remove this import
error: this import is redundant
- --> $DIR/single_component_path_imports_nested_first.rs:13:17
+ --> $DIR/single_component_path_imports_nested_first.rs:15:17
|
LL | use {regex, serde};
| ^^^^^
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports_self_after.rs b/src/tools/clippy/tests/ui/single_component_path_imports_self_after.rs
index 48e8e5302..5723d480a 100644
--- a/src/tools/clippy/tests/ui/single_component_path_imports_self_after.rs
+++ b/src/tools/clippy/tests/ui/single_component_path_imports_self_after.rs
@@ -2,6 +2,7 @@
#![allow(unused_imports)]
use self::regex::{Regex as xeger, RegexSet as tesxeger};
+#[rustfmt::skip]
pub use self::{
regex::{Regex, RegexSet},
some_mod::SomeType,
diff --git a/src/tools/clippy/tests/ui/single_component_path_imports_self_before.rs b/src/tools/clippy/tests/ui/single_component_path_imports_self_before.rs
index 4fb0cf40b..8a4fbf0dc 100644
--- a/src/tools/clippy/tests/ui/single_component_path_imports_self_before.rs
+++ b/src/tools/clippy/tests/ui/single_component_path_imports_self_before.rs
@@ -4,6 +4,7 @@
use regex;
use self::regex::{Regex as xeger, RegexSet as tesxeger};
+#[rustfmt::skip]
pub use self::{
regex::{Regex, RegexSet},
some_mod::SomeType,
diff --git a/src/tools/clippy/tests/ui/single_match.fixed b/src/tools/clippy/tests/ui/single_match.fixed
index e7b1fd6a8..163ba94af 100644
--- a/src/tools/clippy/tests/ui/single_match.fixed
+++ b/src/tools/clippy/tests/ui/single_match.fixed
@@ -4,6 +4,7 @@
unused,
clippy::uninlined_format_args,
clippy::needless_if,
+ clippy::redundant_guards,
clippy::redundant_pattern_matching
)]
fn dummy() {}
diff --git a/src/tools/clippy/tests/ui/single_match.rs b/src/tools/clippy/tests/ui/single_match.rs
index 1515a7053..0dcdb125f 100644
--- a/src/tools/clippy/tests/ui/single_match.rs
+++ b/src/tools/clippy/tests/ui/single_match.rs
@@ -4,6 +4,7 @@
unused,
clippy::uninlined_format_args,
clippy::needless_if,
+ clippy::redundant_guards,
clippy::redundant_pattern_matching
)]
fn dummy() {}
diff --git a/src/tools/clippy/tests/ui/single_match.stderr b/src/tools/clippy/tests/ui/single_match.stderr
index ef9015132..d35361599 100644
--- a/src/tools/clippy/tests/ui/single_match.stderr
+++ b/src/tools/clippy/tests/ui/single_match.stderr
@@ -1,5 +1,5 @@
error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
- --> $DIR/single_match.rs:14:5
+ --> $DIR/single_match.rs:15:5
|
LL | / match x {
LL | | Some(y) => {
@@ -10,7 +10,7 @@ LL | | };
| |_____^
|
= note: `-D clippy::single-match` implied by `-D warnings`
-help: try this
+help: try
|
LL ~ if let Some(y) = x {
LL + println!("{:?}", y);
@@ -18,7 +18,7 @@ LL ~ };
|
error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
- --> $DIR/single_match.rs:22:5
+ --> $DIR/single_match.rs:23:5
|
LL | / match x {
LL | | // Note the missing block braces.
@@ -27,136 +27,136 @@ LL | | // is expanded before we can do anything.
LL | | Some(y) => println!("{:?}", y),
LL | | _ => (),
LL | | }
- | |_____^ help: try this: `if let Some(y) = x { println!("{:?}", y) }`
+ | |_____^ help: try: `if let Some(y) = x { println!("{:?}", y) }`
error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
- --> $DIR/single_match.rs:31:5
+ --> $DIR/single_match.rs:32:5
|
LL | / match z {
LL | | (2..=3, 7..=9) => dummy(),
LL | | _ => {},
LL | | };
- | |_____^ help: try this: `if let (2..=3, 7..=9) = z { dummy() }`
+ | |_____^ help: try: `if let (2..=3, 7..=9) = z { dummy() }`
error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
- --> $DIR/single_match.rs:60:5
+ --> $DIR/single_match.rs:61:5
|
LL | / match x {
LL | | Some(y) => dummy(),
LL | | None => (),
LL | | };
- | |_____^ help: try this: `if let Some(y) = x { dummy() }`
+ | |_____^ help: try: `if let Some(y) = x { dummy() }`
error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
- --> $DIR/single_match.rs:65:5
+ --> $DIR/single_match.rs:66:5
|
LL | / match y {
LL | | Ok(y) => dummy(),
LL | | Err(..) => (),
LL | | };
- | |_____^ help: try this: `if let Ok(y) = y { dummy() }`
+ | |_____^ help: try: `if let Ok(y) = y { dummy() }`
error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
- --> $DIR/single_match.rs:72:5
+ --> $DIR/single_match.rs:73:5
|
LL | / match c {
LL | | Cow::Borrowed(..) => dummy(),
LL | | Cow::Owned(..) => (),
LL | | };
- | |_____^ help: try this: `if let Cow::Borrowed(..) = c { dummy() }`
+ | |_____^ help: try: `if let Cow::Borrowed(..) = c { dummy() }`
error: you seem to be trying to use `match` for an equality check. Consider using `if`
- --> $DIR/single_match.rs:93:5
+ --> $DIR/single_match.rs:94:5
|
LL | / match x {
LL | | "test" => println!(),
LL | | _ => (),
LL | | }
- | |_____^ help: try this: `if x == "test" { println!() }`
+ | |_____^ help: try: `if x == "test" { println!() }`
error: you seem to be trying to use `match` for an equality check. Consider using `if`
- --> $DIR/single_match.rs:106:5
+ --> $DIR/single_match.rs:107:5
|
LL | / match x {
LL | | Foo::A => println!(),
LL | | _ => (),
LL | | }
- | |_____^ help: try this: `if x == Foo::A { println!() }`
+ | |_____^ help: try: `if x == Foo::A { println!() }`
error: you seem to be trying to use `match` for an equality check. Consider using `if`
- --> $DIR/single_match.rs:112:5
+ --> $DIR/single_match.rs:113:5
|
LL | / match x {
LL | | FOO_C => println!(),
LL | | _ => (),
LL | | }
- | |_____^ help: try this: `if x == FOO_C { println!() }`
+ | |_____^ help: try: `if x == FOO_C { println!() }`
error: you seem to be trying to use `match` for an equality check. Consider using `if`
- --> $DIR/single_match.rs:117:5
+ --> $DIR/single_match.rs:118:5
|
LL | / match &&x {
LL | | Foo::A => println!(),
LL | | _ => (),
LL | | }
- | |_____^ help: try this: `if x == Foo::A { println!() }`
+ | |_____^ help: try: `if x == Foo::A { println!() }`
error: you seem to be trying to use `match` for an equality check. Consider using `if`
- --> $DIR/single_match.rs:123:5
+ --> $DIR/single_match.rs:124:5
|
LL | / match &x {
LL | | Foo::A => println!(),
LL | | _ => (),
LL | | }
- | |_____^ help: try this: `if x == &Foo::A { println!() }`
+ | |_____^ help: try: `if x == &Foo::A { println!() }`
error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
- --> $DIR/single_match.rs:140:5
+ --> $DIR/single_match.rs:141:5
|
LL | / match x {
LL | | Bar::A => println!(),
LL | | _ => (),
LL | | }
- | |_____^ help: try this: `if let Bar::A = x { println!() }`
+ | |_____^ help: try: `if let Bar::A = x { println!() }`
error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
- --> $DIR/single_match.rs:148:5
+ --> $DIR/single_match.rs:149:5
|
LL | / match x {
LL | | None => println!(),
LL | | _ => (),
LL | | };
- | |_____^ help: try this: `if let None = x { println!() }`
+ | |_____^ help: try: `if let None = x { println!() }`
error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
- --> $DIR/single_match.rs:170:5
+ --> $DIR/single_match.rs:171:5
|
LL | / match x {
LL | | (Some(_), _) => {},
LL | | (None, _) => {},
LL | | }
- | |_____^ help: try this: `if let (Some(_), _) = x {}`
+ | |_____^ help: try: `if let (Some(_), _) = x {}`
error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
- --> $DIR/single_match.rs:176:5
+ --> $DIR/single_match.rs:177:5
|
LL | / match x {
LL | | (Some(E::V), _) => todo!(),
LL | | (_, _) => {},
LL | | }
- | |_____^ help: try this: `if let (Some(E::V), _) = x { todo!() }`
+ | |_____^ help: try: `if let (Some(E::V), _) = x { todo!() }`
error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
- --> $DIR/single_match.rs:182:5
+ --> $DIR/single_match.rs:183:5
|
LL | / match (Some(42), Some(E::V), Some(42)) {
LL | | (.., Some(E::V), _) => {},
LL | | (..) => {},
LL | | }
- | |_____^ help: try this: `if let (.., Some(E::V), _) = (Some(42), Some(E::V), Some(42)) {}`
+ | |_____^ help: try: `if let (.., Some(E::V), _) = (Some(42), Some(E::V), Some(42)) {}`
error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
- --> $DIR/single_match.rs:254:5
+ --> $DIR/single_match.rs:255:5
|
LL | / match bar {
LL | | Some(v) => unsafe {
@@ -167,7 +167,7 @@ LL | | _ => {},
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let Some(v) = bar { unsafe {
LL + let r = &v as *const i32;
@@ -176,7 +176,7 @@ LL + } }
|
error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`
- --> $DIR/single_match.rs:262:5
+ --> $DIR/single_match.rs:263:5
|
LL | / match bar {
LL | | #[rustfmt::skip]
@@ -187,7 +187,7 @@ LL | | _ => {},
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let Some(v) = bar {
LL + unsafe {
diff --git a/src/tools/clippy/tests/ui/single_match_else.stderr b/src/tools/clippy/tests/ui/single_match_else.stderr
index 228236f3b..5e7d4062e 100644
--- a/src/tools/clippy/tests/ui/single_match_else.stderr
+++ b/src/tools/clippy/tests/ui/single_match_else.stderr
@@ -12,7 +12,7 @@ LL | | };
| |_____^
|
= note: `-D clippy::single-match-else` implied by `-D warnings`
-help: try this
+help: try
|
LL ~ let _ = if let ExprNode::ExprAddrOf = ExprNode::Butterflies { Some(&NODE) } else {
LL + let x = 5;
@@ -32,7 +32,7 @@ LL | | },
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let Some(a) = Some(1) { println!("${:?}", a) } else {
LL + println!("else block");
@@ -52,7 +52,7 @@ LL | | },
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let Some(a) = Some(1) { println!("${:?}", a) } else {
LL + println!("else block");
@@ -72,7 +72,7 @@ LL | | }
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let Ok(a) = Result::<i32, Infallible>::Ok(1) { println!("${:?}", a) } else {
LL + println!("else block");
@@ -92,7 +92,7 @@ LL | | }
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let Cow::Owned(a) = Cow::from("moo") { println!("${:?}", a) } else {
LL + println!("else block");
@@ -112,7 +112,7 @@ LL | | },
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let Some(v) = bar { unsafe {
LL + let r = &v as *const i32;
@@ -135,7 +135,7 @@ LL | | },
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let Some(v) = bar {
LL + println!("Some");
@@ -159,7 +159,7 @@ LL | | },
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let Some(v) = bar { unsafe {
LL + let r = &v as *const i32;
@@ -183,7 +183,7 @@ LL | | },
LL | | }
| |_____^
|
-help: try this
+help: try
|
LL ~ if let Some(v) = bar {
LL + unsafe {
diff --git a/src/tools/clippy/tests/ui/slow_vector_initialization.rs b/src/tools/clippy/tests/ui/slow_vector_initialization.rs
index 16be9f6d2..cfb856861 100644
--- a/src/tools/clippy/tests/ui/slow_vector_initialization.rs
+++ b/src/tools/clippy/tests/ui/slow_vector_initialization.rs
@@ -4,6 +4,7 @@ fn main() {
resize_vector();
extend_vector();
mixed_extend_resize_vector();
+ from_empty_vec();
}
fn extend_vector() {
@@ -59,6 +60,21 @@ fn resize_vector() {
vec1.resize(10, 0);
}
+fn from_empty_vec() {
+ // Resize with constant expression
+ let len = 300;
+ let mut vec1 = Vec::new();
+ vec1.resize(len, 0);
+
+ // Resize with len expression
+ let mut vec3 = Vec::new();
+ vec3.resize(len - 10, 0);
+
+ // Reinitialization should be warned
+ vec1 = Vec::new();
+ vec1.resize(10, 0);
+}
+
fn do_stuff(vec: &mut [u8]) {}
fn extend_vector_with_manipulations_between() {
diff --git a/src/tools/clippy/tests/ui/slow_vector_initialization.stderr b/src/tools/clippy/tests/ui/slow_vector_initialization.stderr
index cb3ce3e95..c88c97a55 100644
--- a/src/tools/clippy/tests/ui/slow_vector_initialization.stderr
+++ b/src/tools/clippy/tests/ui/slow_vector_initialization.stderr
@@ -1,76 +1,100 @@
error: slow zero-filling initialization
- --> $DIR/slow_vector_initialization.rs:13:5
+ --> $DIR/slow_vector_initialization.rs:14:5
|
LL | let mut vec1 = Vec::with_capacity(len);
- | ----------------------- help: consider replace allocation with: `vec![0; len]`
+ | ----------------------- help: consider replacing this with: `vec![0; len]`
LL | vec1.extend(repeat(0).take(len));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: `-D clippy::slow-vector-initialization` implied by `-D warnings`
error: slow zero-filling initialization
- --> $DIR/slow_vector_initialization.rs:17:5
+ --> $DIR/slow_vector_initialization.rs:18:5
|
LL | let mut vec2 = Vec::with_capacity(len - 10);
- | ---------------------------- help: consider replace allocation with: `vec![0; len - 10]`
+ | ---------------------------- help: consider replacing this with: `vec![0; len - 10]`
LL | vec2.extend(repeat(0).take(len - 10));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: slow zero-filling initialization
- --> $DIR/slow_vector_initialization.rs:24:5
+ --> $DIR/slow_vector_initialization.rs:25:5
|
LL | let mut vec4 = Vec::with_capacity(len);
- | ----------------------- help: consider replace allocation with: `vec![0; len]`
+ | ----------------------- help: consider replacing this with: `vec![0; len]`
LL | vec4.extend(repeat(0).take(vec4.capacity()));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: slow zero-filling initialization
- --> $DIR/slow_vector_initialization.rs:34:5
+ --> $DIR/slow_vector_initialization.rs:35:5
|
LL | let mut resized_vec = Vec::with_capacity(30);
- | ---------------------- help: consider replace allocation with: `vec![0; 30]`
+ | ---------------------- help: consider replacing this with: `vec![0; 30]`
LL | resized_vec.resize(30, 0);
| ^^^^^^^^^^^^^^^^^^^^^^^^^
error: slow zero-filling initialization
- --> $DIR/slow_vector_initialization.rs:37:5
+ --> $DIR/slow_vector_initialization.rs:38:5
|
LL | let mut extend_vec = Vec::with_capacity(30);
- | ---------------------- help: consider replace allocation with: `vec![0; 30]`
+ | ---------------------- help: consider replacing this with: `vec![0; 30]`
LL | extend_vec.extend(repeat(0).take(30));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: slow zero-filling initialization
- --> $DIR/slow_vector_initialization.rs:44:5
+ --> $DIR/slow_vector_initialization.rs:45:5
|
LL | let mut vec1 = Vec::with_capacity(len);
- | ----------------------- help: consider replace allocation with: `vec![0; len]`
+ | ----------------------- help: consider replacing this with: `vec![0; len]`
LL | vec1.resize(len, 0);
| ^^^^^^^^^^^^^^^^^^^
error: slow zero-filling initialization
- --> $DIR/slow_vector_initialization.rs:52:5
+ --> $DIR/slow_vector_initialization.rs:53:5
|
LL | let mut vec3 = Vec::with_capacity(len - 10);
- | ---------------------------- help: consider replace allocation with: `vec![0; len - 10]`
+ | ---------------------------- help: consider replacing this with: `vec![0; len - 10]`
LL | vec3.resize(len - 10, 0);
| ^^^^^^^^^^^^^^^^^^^^^^^^
error: slow zero-filling initialization
- --> $DIR/slow_vector_initialization.rs:55:5
+ --> $DIR/slow_vector_initialization.rs:56:5
|
LL | let mut vec4 = Vec::with_capacity(len);
- | ----------------------- help: consider replace allocation with: `vec![0; len]`
+ | ----------------------- help: consider replacing this with: `vec![0; len]`
LL | vec4.resize(vec4.capacity(), 0);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: slow zero-filling initialization
- --> $DIR/slow_vector_initialization.rs:59:5
+ --> $DIR/slow_vector_initialization.rs:60:5
|
LL | vec1 = Vec::with_capacity(10);
- | ---------------------- help: consider replace allocation with: `vec![0; 10]`
+ | ---------------------- help: consider replacing this with: `vec![0; 10]`
LL | vec1.resize(10, 0);
| ^^^^^^^^^^^^^^^^^^
-error: aborting due to 9 previous errors
+error: slow zero-filling initialization
+ --> $DIR/slow_vector_initialization.rs:67:5
+ |
+LL | let mut vec1 = Vec::new();
+ | ---------- help: consider replacing this with: `vec![0; len]`
+LL | vec1.resize(len, 0);
+ | ^^^^^^^^^^^^^^^^^^^
+
+error: slow zero-filling initialization
+ --> $DIR/slow_vector_initialization.rs:71:5
+ |
+LL | let mut vec3 = Vec::new();
+ | ---------- help: consider replacing this with: `vec![0; len - 10]`
+LL | vec3.resize(len - 10, 0);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: slow zero-filling initialization
+ --> $DIR/slow_vector_initialization.rs:75:5
+ |
+LL | vec1 = Vec::new();
+ | ---------- help: consider replacing this with: `vec![0; 10]`
+LL | vec1.resize(10, 0);
+ | ^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 12 previous errors
diff --git a/src/tools/clippy/tests/ui/string_extend.stderr b/src/tools/clippy/tests/ui/string_extend.stderr
index b35c77fd9..34b432901 100644
--- a/src/tools/clippy/tests/ui/string_extend.stderr
+++ b/src/tools/clippy/tests/ui/string_extend.stderr
@@ -2,7 +2,7 @@ error: calling `.extend(_.chars())`
--> $DIR/string_extend.rs:18:5
|
LL | s.extend(abc.chars());
- | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.push_str(abc)`
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `s.push_str(abc)`
|
= note: `-D clippy::string-extend-chars` implied by `-D warnings`
@@ -10,19 +10,19 @@ error: calling `.extend(_.chars())`
--> $DIR/string_extend.rs:21:5
|
LL | s.extend("abc".chars());
- | ^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.push_str("abc")`
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.push_str("abc")`
error: calling `.extend(_.chars())`
--> $DIR/string_extend.rs:24:5
|
LL | s.extend(def.chars());
- | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.push_str(&def)`
+ | ^^^^^^^^^^^^^^^^^^^^^ help: try: `s.push_str(&def)`
error: calling `.extend(_.chars())`
--> $DIR/string_extend.rs:34:5
|
LL | s.extend(abc[0..2].chars());
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.push_str(&abc[0..2])`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.push_str(&abc[0..2])`
error: aborting due to 4 previous errors
diff --git a/src/tools/clippy/tests/ui/string_lit_chars_any.fixed b/src/tools/clippy/tests/ui/string_lit_chars_any.fixed
new file mode 100644
index 000000000..d7ab9c339
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_lit_chars_any.fixed
@@ -0,0 +1,50 @@
+//@run-rustfix
+//@aux-build:proc_macros.rs:proc-macro
+#![allow(clippy::eq_op, clippy::needless_raw_string_hashes, clippy::no_effect, unused)]
+#![warn(clippy::string_lit_chars_any)]
+
+#[macro_use]
+extern crate proc_macros;
+
+struct NotStringLit;
+
+impl NotStringLit {
+ fn chars(&self) -> impl Iterator<Item = char> {
+ "c".chars()
+ }
+}
+
+fn main() {
+ let c = 'c';
+ matches!(c, '\\' | '.' | '+' | '*' | '?' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~');
+ matches!(c, '\\' | '.' | '+' | '*' | '?' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~');
+ matches!(c, '\\' | '.' | '+' | '*' | '?' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~');
+ matches!(c, '\\' | '.' | '+' | '*' | '?' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~');
+ #[rustfmt::skip]
+ matches!(c, '\\' | '.' | '+' | '*' | '?' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~');
+ // Do not lint
+ NotStringLit.chars().any(|x| x == c);
+ "\\.+*?()|[]{}^$#&-~".chars().any(|x| {
+ let c = 'c';
+ x == c
+ });
+ "\\.+*?()|[]{}^$#&-~".chars().any(|x| {
+ 1;
+ x == c
+ });
+ "\\.+*?()|[]{}^$#&-~".chars().any(|x| x == x);
+ "\\.+*?()|[]{}^$#&-~".chars().any(|_x| c == c);
+ matches!(
+ c,
+ '\\' | '.' | '+' | '*' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~'
+ );
+ external! {
+ let c = 'c';
+ "\\.+*?()|[]{}^$#&-~".chars().any(|x| x == c);
+ }
+ with_span! {
+ span
+ let c = 'c';
+ "\\.+*?()|[]{}^$#&-~".chars().any(|x| x == c);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/string_lit_chars_any.rs b/src/tools/clippy/tests/ui/string_lit_chars_any.rs
new file mode 100644
index 000000000..9408d7bb2
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_lit_chars_any.rs
@@ -0,0 +1,50 @@
+//@run-rustfix
+//@aux-build:proc_macros.rs:proc-macro
+#![allow(clippy::eq_op, clippy::needless_raw_string_hashes, clippy::no_effect, unused)]
+#![warn(clippy::string_lit_chars_any)]
+
+#[macro_use]
+extern crate proc_macros;
+
+struct NotStringLit;
+
+impl NotStringLit {
+ fn chars(&self) -> impl Iterator<Item = char> {
+ "c".chars()
+ }
+}
+
+fn main() {
+ let c = 'c';
+ "\\.+*?()|[]{}^$#&-~".chars().any(|x| x == c);
+ r#"\.+*?()|[]{}^$#&-~"#.chars().any(|x| x == c);
+ "\\.+*?()|[]{}^$#&-~".chars().any(|x| c == x);
+ r#"\.+*?()|[]{}^$#&-~"#.chars().any(|x| c == x);
+ #[rustfmt::skip]
+ "\\.+*?()|[]{}^$#&-~".chars().any(|x| { x == c });
+ // Do not lint
+ NotStringLit.chars().any(|x| x == c);
+ "\\.+*?()|[]{}^$#&-~".chars().any(|x| {
+ let c = 'c';
+ x == c
+ });
+ "\\.+*?()|[]{}^$#&-~".chars().any(|x| {
+ 1;
+ x == c
+ });
+ "\\.+*?()|[]{}^$#&-~".chars().any(|x| x == x);
+ "\\.+*?()|[]{}^$#&-~".chars().any(|_x| c == c);
+ matches!(
+ c,
+ '\\' | '.' | '+' | '*' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~'
+ );
+ external! {
+ let c = 'c';
+ "\\.+*?()|[]{}^$#&-~".chars().any(|x| x == c);
+ }
+ with_span! {
+ span
+ let c = 'c';
+ "\\.+*?()|[]{}^$#&-~".chars().any(|x| x == c);
+ }
+}
diff --git a/src/tools/clippy/tests/ui/string_lit_chars_any.stderr b/src/tools/clippy/tests/ui/string_lit_chars_any.stderr
new file mode 100644
index 000000000..ff951b73d
--- /dev/null
+++ b/src/tools/clippy/tests/ui/string_lit_chars_any.stderr
@@ -0,0 +1,58 @@
+error: usage of `.chars().any(...)` to check if a char matches any from a string literal
+ --> $DIR/string_lit_chars_any.rs:19:5
+ |
+LL | "//.+*?()|[]{}^$#&-~".chars().any(|x| x == c);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: `-D clippy::string-lit-chars-any` implied by `-D warnings`
+help: use `matches!(...)` instead
+ |
+LL | matches!(c, '//' | '.' | '+' | '*' | '?' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~');
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: usage of `.chars().any(...)` to check if a char matches any from a string literal
+ --> $DIR/string_lit_chars_any.rs:20:5
+ |
+LL | r#"/.+*?()|[]{}^$#&-~"#.chars().any(|x| x == c);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `matches!(...)` instead
+ |
+LL | matches!(c, '//' | '.' | '+' | '*' | '?' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~');
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: usage of `.chars().any(...)` to check if a char matches any from a string literal
+ --> $DIR/string_lit_chars_any.rs:21:5
+ |
+LL | "//.+*?()|[]{}^$#&-~".chars().any(|x| c == x);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `matches!(...)` instead
+ |
+LL | matches!(c, '//' | '.' | '+' | '*' | '?' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~');
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: usage of `.chars().any(...)` to check if a char matches any from a string literal
+ --> $DIR/string_lit_chars_any.rs:22:5
+ |
+LL | r#"/.+*?()|[]{}^$#&-~"#.chars().any(|x| c == x);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `matches!(...)` instead
+ |
+LL | matches!(c, '//' | '.' | '+' | '*' | '?' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~');
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: usage of `.chars().any(...)` to check if a char matches any from a string literal
+ --> $DIR/string_lit_chars_any.rs:24:5
+ |
+LL | "//.+*?()|[]{}^$#&-~".chars().any(|x| { x == c });
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: use `matches!(...)` instead
+ |
+LL | matches!(c, '//' | '.' | '+' | '*' | '?' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~');
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to 5 previous errors
+
diff --git a/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr b/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr
index 296268a5f..fcd17f689 100644
--- a/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr
+++ b/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr
@@ -2,7 +2,7 @@ error: using `libc::strlen` on a `CString` or `CStr` value
--> $DIR/strlen_on_c_strings.rs:15:13
|
LL | let _ = unsafe { libc::strlen(cstring.as_ptr()) };
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `cstring.as_bytes().len()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `cstring.as_bytes().len()`
|
= note: `-D clippy::strlen-on-c-strings` implied by `-D warnings`
@@ -10,37 +10,37 @@ error: using `libc::strlen` on a `CString` or `CStr` value
--> $DIR/strlen_on_c_strings.rs:19:13
|
LL | let _ = unsafe { libc::strlen(cstr.as_ptr()) };
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `cstr.to_bytes().len()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `cstr.to_bytes().len()`
error: using `libc::strlen` on a `CString` or `CStr` value
--> $DIR/strlen_on_c_strings.rs:21:13
|
LL | let _ = unsafe { strlen(cstr.as_ptr()) };
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `cstr.to_bytes().len()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `cstr.to_bytes().len()`
error: using `libc::strlen` on a `CString` or `CStr` value
--> $DIR/strlen_on_c_strings.rs:24:22
|
LL | let _ = unsafe { strlen((*pcstr).as_ptr()) };
- | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(*pcstr).to_bytes().len()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(*pcstr).to_bytes().len()`
error: using `libc::strlen` on a `CString` or `CStr` value
--> $DIR/strlen_on_c_strings.rs:29:22
|
LL | let _ = unsafe { strlen(unsafe_identity(cstr).as_ptr()) };
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unsafe_identity(cstr).to_bytes().len()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unsafe_identity(cstr).to_bytes().len()`
error: using `libc::strlen` on a `CString` or `CStr` value
--> $DIR/strlen_on_c_strings.rs:30:13
|
LL | let _ = unsafe { strlen(unsafe { unsafe_identity(cstr) }.as_ptr()) };
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unsafe { unsafe_identity(cstr) }.to_bytes().len()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unsafe { unsafe_identity(cstr) }.to_bytes().len()`
error: using `libc::strlen` on a `CString` or `CStr` value
--> $DIR/strlen_on_c_strings.rs:33:22
|
LL | let _ = unsafe { strlen(f(cstr).as_ptr()) };
- | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `f(cstr).to_bytes().len()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `f(cstr).to_bytes().len()`
error: aborting due to 7 previous errors
diff --git a/src/tools/clippy/tests/ui/suspicious_xor_used_as_pow.stderr b/src/tools/clippy/tests/ui/suspicious_xor_used_as_pow.stderr
index 8bb3c8fbe..d93a55ba9 100644
--- a/src/tools/clippy/tests/ui/suspicious_xor_used_as_pow.stderr
+++ b/src/tools/clippy/tests/ui/suspicious_xor_used_as_pow.stderr
@@ -10,31 +10,31 @@ error: `^` is not the exponentiation operator
--> $DIR/suspicious_xor_used_as_pow.rs:20:13
|
LL | let _ = 2i32 ^ 9i32;
- | ^^^^^^^^^^^ help: did you mean to write: `2_i32.pow(9_i32)`
+ | ^^^^^^^^^^^ help: did you mean to write: `2i32.pow(9i32)`
error: `^` is not the exponentiation operator
--> $DIR/suspicious_xor_used_as_pow.rs:21:13
|
LL | let _ = 2i32 ^ 2i32;
- | ^^^^^^^^^^^ help: did you mean to write: `2_i32.pow(2_i32)`
+ | ^^^^^^^^^^^ help: did you mean to write: `2i32.pow(2i32)`
error: `^` is not the exponentiation operator
--> $DIR/suspicious_xor_used_as_pow.rs:22:13
|
LL | let _ = 50i32 ^ 3i32;
- | ^^^^^^^^^^^^ help: did you mean to write: `50_i32.pow(3_i32)`
+ | ^^^^^^^^^^^^ help: did you mean to write: `50i32.pow(3i32)`
error: `^` is not the exponentiation operator
--> $DIR/suspicious_xor_used_as_pow.rs:23:13
|
LL | let _ = 5i32 ^ 8i32;
- | ^^^^^^^^^^^ help: did you mean to write: `5_i32.pow(8_i32)`
+ | ^^^^^^^^^^^ help: did you mean to write: `5i32.pow(8i32)`
error: `^` is not the exponentiation operator
--> $DIR/suspicious_xor_used_as_pow.rs:24:13
|
LL | let _ = 2i32 ^ 32i32;
- | ^^^^^^^^^^^^ help: did you mean to write: `2_i32.pow(32_i32)`
+ | ^^^^^^^^^^^^ help: did you mean to write: `2i32.pow(32i32)`
error: `^` is not the exponentiation operator
--> $DIR/suspicious_xor_used_as_pow.rs:13:9
diff --git a/src/tools/clippy/tests/ui/swap.fixed b/src/tools/clippy/tests/ui/swap.fixed
index 22f904e3f..7b74a83b6 100644
--- a/src/tools/clippy/tests/ui/swap.fixed
+++ b/src/tools/clippy/tests/ui/swap.fixed
@@ -11,7 +11,8 @@
unused_assignments,
unused_variables,
clippy::let_and_return,
- clippy::useless_vec
+ clippy::useless_vec,
+ clippy::redundant_locals
)]
struct Foo(u32);
diff --git a/src/tools/clippy/tests/ui/swap.rs b/src/tools/clippy/tests/ui/swap.rs
index ada64f89e..93855cd7b 100644
--- a/src/tools/clippy/tests/ui/swap.rs
+++ b/src/tools/clippy/tests/ui/swap.rs
@@ -11,7 +11,8 @@
unused_assignments,
unused_variables,
clippy::let_and_return,
- clippy::useless_vec
+ clippy::useless_vec,
+ clippy::redundant_locals
)]
struct Foo(u32);
diff --git a/src/tools/clippy/tests/ui/swap.stderr b/src/tools/clippy/tests/ui/swap.stderr
index a3b9c2b74..1097b29bb 100644
--- a/src/tools/clippy/tests/ui/swap.stderr
+++ b/src/tools/clippy/tests/ui/swap.stderr
@@ -1,5 +1,5 @@
error: this looks like you are swapping `bar.a` and `bar.b` manually
- --> $DIR/swap.rs:28:5
+ --> $DIR/swap.rs:29:5
|
LL | / let temp = bar.a;
LL | | bar.a = bar.b;
@@ -10,7 +10,7 @@ LL | | bar.b = temp;
= note: `-D clippy::manual-swap` implied by `-D warnings`
error: this looks like you are swapping elements of `foo` manually
- --> $DIR/swap.rs:40:5
+ --> $DIR/swap.rs:41:5
|
LL | / let temp = foo[0];
LL | | foo[0] = foo[1];
@@ -18,7 +18,7 @@ LL | | foo[1] = temp;
| |__________________^ help: try: `foo.swap(0, 1);`
error: this looks like you are swapping elements of `foo` manually
- --> $DIR/swap.rs:49:5
+ --> $DIR/swap.rs:50:5
|
LL | / let temp = foo[0];
LL | | foo[0] = foo[1];
@@ -26,7 +26,7 @@ LL | | foo[1] = temp;
| |__________________^ help: try: `foo.swap(0, 1);`
error: this looks like you are swapping elements of `foo` manually
- --> $DIR/swap.rs:68:5
+ --> $DIR/swap.rs:69:5
|
LL | / let temp = foo[0];
LL | | foo[0] = foo[1];
@@ -34,7 +34,7 @@ LL | | foo[1] = temp;
| |__________________^ help: try: `foo.swap(0, 1);`
error: this looks like you are swapping `a` and `b` manually
- --> $DIR/swap.rs:79:5
+ --> $DIR/swap.rs:80:5
|
LL | / a ^= b;
LL | | b ^= a;
@@ -42,7 +42,7 @@ LL | | a ^= b;
| |___________^ help: try: `std::mem::swap(&mut a, &mut b);`
error: this looks like you are swapping `bar.a` and `bar.b` manually
- --> $DIR/swap.rs:87:5
+ --> $DIR/swap.rs:88:5
|
LL | / bar.a ^= bar.b;
LL | | bar.b ^= bar.a;
@@ -50,7 +50,7 @@ LL | | bar.a ^= bar.b;
| |___________________^ help: try: `std::mem::swap(&mut bar.a, &mut bar.b);`
error: this looks like you are swapping elements of `foo` manually
- --> $DIR/swap.rs:95:5
+ --> $DIR/swap.rs:96:5
|
LL | / foo[0] ^= foo[1];
LL | | foo[1] ^= foo[0];
@@ -58,7 +58,7 @@ LL | | foo[0] ^= foo[1];
| |_____________________^ help: try: `foo.swap(0, 1);`
error: this looks like you are swapping `foo[0][1]` and `bar[1][0]` manually
- --> $DIR/swap.rs:124:5
+ --> $DIR/swap.rs:125:5
|
LL | / let temp = foo[0][1];
LL | | foo[0][1] = bar[1][0];
@@ -68,7 +68,7 @@ LL | | bar[1][0] = temp;
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are swapping `a` and `b` manually
- --> $DIR/swap.rs:138:7
+ --> $DIR/swap.rs:139:7
|
LL | ; let t = a;
| _______^
@@ -79,7 +79,7 @@ LL | | b = t;
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are swapping `c.0` and `a` manually
- --> $DIR/swap.rs:147:7
+ --> $DIR/swap.rs:148:7
|
LL | ; let t = c.0;
| _______^
@@ -90,7 +90,7 @@ LL | | a = t;
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are swapping `b` and `a` manually
- --> $DIR/swap.rs:173:5
+ --> $DIR/swap.rs:174:5
|
LL | / let t = b;
LL | | b = a;
@@ -100,7 +100,7 @@ LL | | a = t;
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are trying to swap `a` and `b`
- --> $DIR/swap.rs:135:5
+ --> $DIR/swap.rs:136:5
|
LL | / a = b;
LL | | b = a;
@@ -110,7 +110,7 @@ LL | | b = a;
= note: `-D clippy::almost-swapped` implied by `-D warnings`
error: this looks like you are trying to swap `c.0` and `a`
- --> $DIR/swap.rs:144:5
+ --> $DIR/swap.rs:145:5
|
LL | / c.0 = a;
LL | | a = c.0;
@@ -119,7 +119,7 @@ LL | | a = c.0;
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are trying to swap `a` and `b`
- --> $DIR/swap.rs:151:5
+ --> $DIR/swap.rs:152:5
|
LL | / let a = b;
LL | | let b = a;
@@ -128,7 +128,7 @@ LL | | let b = a;
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are trying to swap `d` and `c`
- --> $DIR/swap.rs:156:5
+ --> $DIR/swap.rs:157:5
|
LL | / d = c;
LL | | c = d;
@@ -137,7 +137,7 @@ LL | | c = d;
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are trying to swap `a` and `b`
- --> $DIR/swap.rs:160:5
+ --> $DIR/swap.rs:161:5
|
LL | / let a = b;
LL | | b = a;
@@ -146,7 +146,7 @@ LL | | b = a;
= note: or maybe you should use `std::mem::replace`?
error: this looks like you are swapping `s.0.x` and `s.0.y` manually
- --> $DIR/swap.rs:208:5
+ --> $DIR/swap.rs:209:5
|
LL | / let t = s.0.x;
LL | | s.0.x = s.0.y;
diff --git a/src/tools/clippy/tests/ui/to_digit_is_some.stderr b/src/tools/clippy/tests/ui/to_digit_is_some.stderr
index 10a1b393a..c4718825d 100644
--- a/src/tools/clippy/tests/ui/to_digit_is_some.stderr
+++ b/src/tools/clippy/tests/ui/to_digit_is_some.stderr
@@ -2,7 +2,7 @@ error: use of `.to_digit(..).is_some()`
--> $DIR/to_digit_is_some.rs:9:13
|
LL | let _ = d.to_digit(8).is_some();
- | ^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `d.is_digit(8)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `d.is_digit(8)`
|
= note: `-D clippy::to-digit-is-some` implied by `-D warnings`
@@ -10,7 +10,7 @@ error: use of `.to_digit(..).is_some()`
--> $DIR/to_digit_is_some.rs:10:13
|
LL | let _ = char::to_digit(c, 8).is_some();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `char::is_digit(c, 8)`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `char::is_digit(c, 8)`
error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.rs b/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.rs
index 486155831..86f5cc937 100644
--- a/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.rs
+++ b/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.rs
@@ -5,7 +5,8 @@
clippy::disallowed_names,
clippy::needless_lifetimes,
clippy::redundant_field_names,
- clippy::uninlined_format_args
+ clippy::uninlined_format_args,
+ clippy::needless_pass_by_ref_mut
)]
#[derive(Copy, Clone)]
diff --git a/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.stderr b/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.stderr
index 8c5cfa8a0..2af668537 100644
--- a/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.stderr
+++ b/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.stderr
@@ -1,5 +1,5 @@
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:51:11
+ --> $DIR/trivially_copy_pass_by_ref.rs:52:11
|
LL | fn bad(x: &u32, y: &Foo, z: &Baz) {}
| ^^^^ help: consider passing by value instead: `u32`
@@ -11,103 +11,103 @@ LL | #![deny(clippy::trivially_copy_pass_by_ref)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:51:20
+ --> $DIR/trivially_copy_pass_by_ref.rs:52:20
|
LL | fn bad(x: &u32, y: &Foo, z: &Baz) {}
| ^^^^ help: consider passing by value instead: `Foo`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:51:29
+ --> $DIR/trivially_copy_pass_by_ref.rs:52:29
|
LL | fn bad(x: &u32, y: &Foo, z: &Baz) {}
| ^^^^ help: consider passing by value instead: `Baz`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:58:12
+ --> $DIR/trivially_copy_pass_by_ref.rs:59:12
|
LL | fn bad(&self, x: &u32, y: &Foo, z: &Baz) {}
| ^^^^^ help: consider passing by value instead: `self`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:58:22
+ --> $DIR/trivially_copy_pass_by_ref.rs:59:22
|
LL | fn bad(&self, x: &u32, y: &Foo, z: &Baz) {}
| ^^^^ help: consider passing by value instead: `u32`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:58:31
+ --> $DIR/trivially_copy_pass_by_ref.rs:59:31
|
LL | fn bad(&self, x: &u32, y: &Foo, z: &Baz) {}
| ^^^^ help: consider passing by value instead: `Foo`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:58:40
+ --> $DIR/trivially_copy_pass_by_ref.rs:59:40
|
LL | fn bad(&self, x: &u32, y: &Foo, z: &Baz) {}
| ^^^^ help: consider passing by value instead: `Baz`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:60:16
+ --> $DIR/trivially_copy_pass_by_ref.rs:61:16
|
LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {}
| ^^^^ help: consider passing by value instead: `u32`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:60:25
+ --> $DIR/trivially_copy_pass_by_ref.rs:61:25
|
LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {}
| ^^^^ help: consider passing by value instead: `Foo`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:60:34
+ --> $DIR/trivially_copy_pass_by_ref.rs:61:34
|
LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {}
| ^^^^ help: consider passing by value instead: `Baz`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:62:35
+ --> $DIR/trivially_copy_pass_by_ref.rs:63:35
|
LL | fn bad_issue7518(self, other: &Self) {}
| ^^^^^ help: consider passing by value instead: `Self`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:74:16
+ --> $DIR/trivially_copy_pass_by_ref.rs:75:16
|
LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {}
| ^^^^ help: consider passing by value instead: `u32`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:74:25
+ --> $DIR/trivially_copy_pass_by_ref.rs:75:25
|
LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {}
| ^^^^ help: consider passing by value instead: `Foo`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:74:34
+ --> $DIR/trivially_copy_pass_by_ref.rs:75:34
|
LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {}
| ^^^^ help: consider passing by value instead: `Baz`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:78:34
+ --> $DIR/trivially_copy_pass_by_ref.rs:79:34
|
LL | fn trait_method(&self, _foo: &Foo);
| ^^^^ help: consider passing by value instead: `Foo`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:110:21
+ --> $DIR/trivially_copy_pass_by_ref.rs:111:21
|
LL | fn foo_never(x: &i32) {
| ^^^^ help: consider passing by value instead: `i32`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:115:15
+ --> $DIR/trivially_copy_pass_by_ref.rs:116:15
|
LL | fn foo(x: &i32) {
| ^^^^ help: consider passing by value instead: `i32`
error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte)
- --> $DIR/trivially_copy_pass_by_ref.rs:142:37
+ --> $DIR/trivially_copy_pass_by_ref.rs:143:37
|
LL | fn _unrelated_lifetimes<'a, 'b>(_x: &'a u32, y: &'b u32) -> &'b u32 {
| ^^^^^^^ help: consider passing by value instead: `u32`
diff --git a/src/tools/clippy/tests/ui/try_err.fixed b/src/tools/clippy/tests/ui/try_err.fixed
index 181674087..930489fab 100644
--- a/src/tools/clippy/tests/ui/try_err.fixed
+++ b/src/tools/clippy/tests/ui/try_err.fixed
@@ -2,7 +2,11 @@
//@aux-build:proc_macros.rs:proc-macro
#![deny(clippy::try_err)]
-#![allow(clippy::unnecessary_wraps, clippy::needless_question_mark)]
+#![allow(
+ clippy::unnecessary_wraps,
+ clippy::needless_question_mark,
+ clippy::needless_return_with_question_mark
+)]
extern crate proc_macros;
use proc_macros::{external, inline_macros};
diff --git a/src/tools/clippy/tests/ui/try_err.rs b/src/tools/clippy/tests/ui/try_err.rs
index 0e47c4d02..f5baf3d8f 100644
--- a/src/tools/clippy/tests/ui/try_err.rs
+++ b/src/tools/clippy/tests/ui/try_err.rs
@@ -2,7 +2,11 @@
//@aux-build:proc_macros.rs:proc-macro
#![deny(clippy::try_err)]
-#![allow(clippy::unnecessary_wraps, clippy::needless_question_mark)]
+#![allow(
+ clippy::unnecessary_wraps,
+ clippy::needless_question_mark,
+ clippy::needless_return_with_question_mark
+)]
extern crate proc_macros;
use proc_macros::{external, inline_macros};
diff --git a/src/tools/clippy/tests/ui/try_err.stderr b/src/tools/clippy/tests/ui/try_err.stderr
index 4ad0e2e56..9968b383e 100644
--- a/src/tools/clippy/tests/ui/try_err.stderr
+++ b/src/tools/clippy/tests/ui/try_err.stderr
@@ -1,8 +1,8 @@
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:19:9
+ --> $DIR/try_err.rs:23:9
|
LL | Err(err)?;
- | ^^^^^^^^^ help: try this: `return Err(err)`
+ | ^^^^^^^^^ help: try: `return Err(err)`
|
note: the lint level is defined here
--> $DIR/try_err.rs:4:9
@@ -11,68 +11,68 @@ LL | #![deny(clippy::try_err)]
| ^^^^^^^^^^^^^^^
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:29:9
+ --> $DIR/try_err.rs:33:9
|
LL | Err(err)?;
- | ^^^^^^^^^ help: try this: `return Err(err.into())`
+ | ^^^^^^^^^ help: try: `return Err(err.into())`
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:49:17
+ --> $DIR/try_err.rs:53:17
|
LL | Err(err)?;
- | ^^^^^^^^^ help: try this: `return Err(err)`
+ | ^^^^^^^^^ help: try: `return Err(err)`
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:68:17
+ --> $DIR/try_err.rs:72:17
|
LL | Err(err)?;
- | ^^^^^^^^^ help: try this: `return Err(err.into())`
+ | ^^^^^^^^^ help: try: `return Err(err.into())`
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:88:23
+ --> $DIR/try_err.rs:92:23
|
LL | Err(_) => Err(1)?,
- | ^^^^^^^ help: try this: `return Err(1)`
+ | ^^^^^^^ help: try: `return Err(1)`
|
= note: this error originates in the macro `__inline_mac_fn_calling_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:95:23
+ --> $DIR/try_err.rs:99:23
|
LL | Err(_) => Err(inline!(1))?,
- | ^^^^^^^^^^^^^^^^ help: try this: `return Err(inline!(1))`
+ | ^^^^^^^^^^^^^^^^ help: try: `return Err(inline!(1))`
|
= note: this error originates in the macro `__inline_mac_fn_calling_macro` (in Nightly builds, run with -Z macro-backtrace for more info)
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:122:9
+ --> $DIR/try_err.rs:126:9
|
LL | Err(inline!(inline!(String::from("aasdfasdfasdfa"))))?;
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Err(inline!(inline!(String::from("aasdfasdfasdfa"))))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `return Err(inline!(inline!(String::from("aasdfasdfasdfa"))))`
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:129:9
+ --> $DIR/try_err.rs:133:9
|
LL | Err(io::ErrorKind::WriteZero)?
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Poll::Ready(Err(io::ErrorKind::WriteZero.into()))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `return Poll::Ready(Err(io::ErrorKind::WriteZero.into()))`
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:131:9
+ --> $DIR/try_err.rs:135:9
|
LL | Err(io::Error::new(io::ErrorKind::InvalidInput, "error"))?
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Poll::Ready(Err(io::Error::new(io::ErrorKind::InvalidInput, "error")))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `return Poll::Ready(Err(io::Error::new(io::ErrorKind::InvalidInput, "error")))`
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:139:9
+ --> $DIR/try_err.rs:143:9
|
LL | Err(io::ErrorKind::NotFound)?
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Poll::Ready(Some(Err(io::ErrorKind::NotFound.into())))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `return Poll::Ready(Some(Err(io::ErrorKind::NotFound.into())))`
error: returning an `Err(_)` with the `?` operator
- --> $DIR/try_err.rs:148:16
+ --> $DIR/try_err.rs:152:16
|
LL | return Err(42)?;
- | ^^^^^^^^ help: try this: `Err(42)`
+ | ^^^^^^^^ help: try: `Err(42)`
error: aborting due to 11 previous errors
diff --git a/src/tools/clippy/tests/ui/tuple_array_conversions.rs b/src/tools/clippy/tests/ui/tuple_array_conversions.rs
index f96a7c97f..569415acb 100644
--- a/src/tools/clippy/tests/ui/tuple_array_conversions.rs
+++ b/src/tools/clippy/tests/ui/tuple_array_conversions.rs
@@ -52,6 +52,36 @@ fn main() {
let v1: Vec<[u32; 2]> = t1.iter().map(|&(a, b)| [a, b]).collect();
let t2: Vec<(u32, u32)> = v1.iter().map(|&[a, b]| (a, b)).collect();
}
+ // FP #11082; needs discussion
+ let (a, b) = (1.0f64, 2.0f64);
+ let _: &[f64] = &[a, b];
+ // FP #11085; impossible to fix
+ let [src, dest]: [_; 2] = [1, 2];
+ (src, dest);
+ // FP #11100
+ fn issue_11100_array_to_tuple(this: [&mut i32; 2]) -> (&i32, &mut i32) {
+ let [input, output] = this;
+ (input, output)
+ }
+
+ fn issue_11100_tuple_to_array<'a>(this: (&'a mut i32, &'a mut i32)) -> [&'a i32; 2] {
+ let (input, output) = this;
+ [input, output]
+ }
+ // FP #11124
+ // tuple=>array
+ let (a, b) = (1, 2);
+ [a, b];
+ let x = a;
+ // array=>tuple
+ let [a, b] = [1, 2];
+ (a, b);
+ let x = a;
+ // FP #11144
+ let (a, (b, c)) = (1, (2, 3));
+ [a, c];
+ let [[a, b], [c, d]] = [[1, 2], [3, 4]];
+ (a, c);
}
#[clippy::msrv = "1.70.0"]
diff --git a/src/tools/clippy/tests/ui/tuple_array_conversions.stderr b/src/tools/clippy/tests/ui/tuple_array_conversions.stderr
index be653e8ef..50bdcf29d 100644
--- a/src/tools/clippy/tests/ui/tuple_array_conversions.stderr
+++ b/src/tools/clippy/tests/ui/tuple_array_conversions.stderr
@@ -15,14 +15,6 @@ LL | let x = [x.0, x.1];
|
= help: use `.into()` instead, or `<[T; N]>::from` if type annotations are needed
-error: it looks like you're trying to convert an array to a tuple
- --> $DIR/tuple_array_conversions.rs:13:13
- |
-LL | let x = (x[0], x[1]);
- | ^^^^^^^^^^^^
- |
- = help: use `.into()` instead, or `<(T0, T1, ..., Tn)>::from` if type annotations are needed
-
error: it looks like you're trying to convert a tuple to an array
--> $DIR/tuple_array_conversions.rs:16:53
|
@@ -55,8 +47,24 @@ LL | t1.iter().for_each(|&(a, b)| _ = [a, b]);
|
= help: use `.into()` instead, or `<[T; N]>::from` if type annotations are needed
+error: it looks like you're trying to convert a tuple to an array
+ --> $DIR/tuple_array_conversions.rs:57:22
+ |
+LL | let _: &[f64] = &[a, b];
+ | ^^^^^^
+ |
+ = help: use `.into()` instead, or `<[T; N]>::from` if type annotations are needed
+
error: it looks like you're trying to convert an array to a tuple
- --> $DIR/tuple_array_conversions.rs:69:13
+ --> $DIR/tuple_array_conversions.rs:60:5
+ |
+LL | (src, dest);
+ | ^^^^^^^^^^^
+ |
+ = help: use `.into()` instead, or `<(T0, T1, ..., Tn)>::from` if type annotations are needed
+
+error: it looks like you're trying to convert an array to a tuple
+ --> $DIR/tuple_array_conversions.rs:99:13
|
LL | let x = (x[0], x[1]);
| ^^^^^^^^^^^^
@@ -64,20 +72,12 @@ LL | let x = (x[0], x[1]);
= help: use `.into()` instead, or `<(T0, T1, ..., Tn)>::from` if type annotations are needed
error: it looks like you're trying to convert a tuple to an array
- --> $DIR/tuple_array_conversions.rs:70:13
+ --> $DIR/tuple_array_conversions.rs:100:13
|
LL | let x = [x.0, x.1];
| ^^^^^^^^^^
|
= help: use `.into()` instead, or `<[T; N]>::from` if type annotations are needed
-error: it looks like you're trying to convert an array to a tuple
- --> $DIR/tuple_array_conversions.rs:72:13
- |
-LL | let x = (x[0], x[1]);
- | ^^^^^^^^^^^^
- |
- = help: use `.into()` instead, or `<(T0, T1, ..., Tn)>::from` if type annotations are needed
-
error: aborting due to 10 previous errors
diff --git a/src/tools/clippy/tests/ui/type_id_on_box.fixed b/src/tools/clippy/tests/ui/type_id_on_box.fixed
new file mode 100644
index 000000000..615d809c8
--- /dev/null
+++ b/src/tools/clippy/tests/ui/type_id_on_box.fixed
@@ -0,0 +1,40 @@
+//@run-rustfix
+
+#![warn(clippy::type_id_on_box)]
+
+use std::any::{Any, TypeId};
+use std::ops::Deref;
+
+type SomeBox = Box<dyn Any>;
+
+struct BadBox(Box<dyn Any>);
+
+impl Deref for BadBox {
+ type Target = Box<dyn Any>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn existential() -> impl Any {
+ Box::new(1) as Box<dyn Any>
+}
+
+fn main() {
+ let any_box: Box<dyn Any> = Box::new(0usize);
+ let _ = (*any_box).type_id();
+ let _ = TypeId::of::<Box<dyn Any>>(); // Don't lint. We explicitly say "do this instead" if this is intentional
+ let _ = (*any_box).type_id();
+ let any_box: &Box<dyn Any> = &(Box::new(0usize) as Box<dyn Any>);
+ let _ = (**any_box).type_id(); // 2 derefs are needed here to get to the `dyn Any`
+
+ let b = existential();
+ let _ = b.type_id(); // Don't lint.
+
+ let b: SomeBox = Box::new(0usize);
+ let _ = (*b).type_id();
+
+ let b = BadBox(Box::new(0usize));
+ let _ = b.type_id(); // Don't lint. This is a call to `<BadBox as Any>::type_id`. Not `std::boxed::Box`!
+}
diff --git a/src/tools/clippy/tests/ui/type_id_on_box.rs b/src/tools/clippy/tests/ui/type_id_on_box.rs
new file mode 100644
index 000000000..74b6c74ae
--- /dev/null
+++ b/src/tools/clippy/tests/ui/type_id_on_box.rs
@@ -0,0 +1,40 @@
+//@run-rustfix
+
+#![warn(clippy::type_id_on_box)]
+
+use std::any::{Any, TypeId};
+use std::ops::Deref;
+
+type SomeBox = Box<dyn Any>;
+
+struct BadBox(Box<dyn Any>);
+
+impl Deref for BadBox {
+ type Target = Box<dyn Any>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn existential() -> impl Any {
+ Box::new(1) as Box<dyn Any>
+}
+
+fn main() {
+ let any_box: Box<dyn Any> = Box::new(0usize);
+ let _ = any_box.type_id();
+ let _ = TypeId::of::<Box<dyn Any>>(); // Don't lint. We explicitly say "do this instead" if this is intentional
+ let _ = (*any_box).type_id();
+ let any_box: &Box<dyn Any> = &(Box::new(0usize) as Box<dyn Any>);
+ let _ = any_box.type_id(); // 2 derefs are needed here to get to the `dyn Any`
+
+ let b = existential();
+ let _ = b.type_id(); // Don't lint.
+
+ let b: SomeBox = Box::new(0usize);
+ let _ = b.type_id();
+
+ let b = BadBox(Box::new(0usize));
+ let _ = b.type_id(); // Don't lint. This is a call to `<BadBox as Any>::type_id`. Not `std::boxed::Box`!
+}
diff --git a/src/tools/clippy/tests/ui/type_id_on_box.stderr b/src/tools/clippy/tests/ui/type_id_on_box.stderr
new file mode 100644
index 000000000..1525328c0
--- /dev/null
+++ b/src/tools/clippy/tests/ui/type_id_on_box.stderr
@@ -0,0 +1,36 @@
+error: calling `.type_id()` on a `Box<dyn Any>`
+ --> $DIR/type_id_on_box.rs:26:13
+ |
+LL | let _ = any_box.type_id();
+ | -------^^^^^^^^^^
+ | |
+ | help: consider dereferencing first: `(*any_box)`
+ |
+ = note: this returns the type id of the literal type `Box<dyn Any>` instead of the type id of the boxed value, which is most likely not what you want
+ = note: if this is intentional, use `TypeId::of::<Box<dyn Any>>()` instead, which makes it more clear
+ = note: `-D clippy::type-id-on-box` implied by `-D warnings`
+
+error: calling `.type_id()` on a `Box<dyn Any>`
+ --> $DIR/type_id_on_box.rs:30:13
+ |
+LL | let _ = any_box.type_id(); // 2 derefs are needed here to get to the `dyn Any`
+ | -------^^^^^^^^^^
+ | |
+ | help: consider dereferencing first: `(**any_box)`
+ |
+ = note: this returns the type id of the literal type `Box<dyn Any>` instead of the type id of the boxed value, which is most likely not what you want
+ = note: if this is intentional, use `TypeId::of::<Box<dyn Any>>()` instead, which makes it more clear
+
+error: calling `.type_id()` on a `Box<dyn Any>`
+ --> $DIR/type_id_on_box.rs:36:13
+ |
+LL | let _ = b.type_id();
+ | -^^^^^^^^^^
+ | |
+ | help: consider dereferencing first: `(*b)`
+ |
+ = note: this returns the type id of the literal type `Box<dyn Any>` instead of the type id of the boxed value, which is most likely not what you want
+ = note: if this is intentional, use `TypeId::of::<Box<dyn Any>>()` instead, which makes it more clear
+
+error: aborting due to 3 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_cast.fixed b/src/tools/clippy/tests/ui/unnecessary_cast.fixed
index 8efd44baf..2bf02f134 100644
--- a/src/tools/clippy/tests/ui/unnecessary_cast.fixed
+++ b/src/tools/clippy/tests/ui/unnecessary_cast.fixed
@@ -38,6 +38,16 @@ mod fake_libc {
}
}
+fn aaa() -> ::std::primitive::u32 {
+ 0
+}
+
+use std::primitive::u32 as UnsignedThirtyTwoBitInteger;
+
+fn bbb() -> UnsignedThirtyTwoBitInteger {
+ 0
+}
+
#[rustfmt::skip]
fn main() {
// Test cast_unnecessary
@@ -105,6 +115,13 @@ fn main() {
extern_fake_libc::getpid_SAFE_TRUTH() as i32;
let pid = unsafe { fake_libc::getpid() };
pid as i32;
+ aaa();
+ let x = aaa();
+ aaa();
+ // Will not lint currently.
+ bbb() as u32;
+ let x = bbb();
+ bbb() as u32;
let i8_ptr: *const i8 = &1;
let u8_ptr: *const u8 = &1;
diff --git a/src/tools/clippy/tests/ui/unnecessary_cast.rs b/src/tools/clippy/tests/ui/unnecessary_cast.rs
index c7723ef51..25b6b0f9b 100644
--- a/src/tools/clippy/tests/ui/unnecessary_cast.rs
+++ b/src/tools/clippy/tests/ui/unnecessary_cast.rs
@@ -38,6 +38,16 @@ mod fake_libc {
}
}
+fn aaa() -> ::std::primitive::u32 {
+ 0
+}
+
+use std::primitive::u32 as UnsignedThirtyTwoBitInteger;
+
+fn bbb() -> UnsignedThirtyTwoBitInteger {
+ 0
+}
+
#[rustfmt::skip]
fn main() {
// Test cast_unnecessary
@@ -105,6 +115,13 @@ fn main() {
extern_fake_libc::getpid_SAFE_TRUTH() as i32;
let pid = unsafe { fake_libc::getpid() };
pid as i32;
+ aaa() as u32;
+ let x = aaa();
+ aaa() as u32;
+ // Will not lint currently.
+ bbb() as u32;
+ let x = bbb();
+ bbb() as u32;
let i8_ptr: *const i8 = &1;
let u8_ptr: *const u8 = &1;
diff --git a/src/tools/clippy/tests/ui/unnecessary_cast.stderr b/src/tools/clippy/tests/ui/unnecessary_cast.stderr
index f0443556f..19411a01b 100644
--- a/src/tools/clippy/tests/ui/unnecessary_cast.stderr
+++ b/src/tools/clippy/tests/ui/unnecessary_cast.stderr
@@ -7,226 +7,238 @@ LL | ptr as *const T
= note: `-D clippy::unnecessary-cast` implied by `-D warnings`
error: casting integer literal to `i32` is unnecessary
- --> $DIR/unnecessary_cast.rs:44:5
+ --> $DIR/unnecessary_cast.rs:54:5
|
LL | 1i32 as i32;
| ^^^^^^^^^^^ help: try: `1_i32`
error: casting float literal to `f32` is unnecessary
- --> $DIR/unnecessary_cast.rs:45:5
+ --> $DIR/unnecessary_cast.rs:55:5
|
LL | 1f32 as f32;
| ^^^^^^^^^^^ help: try: `1_f32`
error: casting to the same type is unnecessary (`bool` -> `bool`)
- --> $DIR/unnecessary_cast.rs:46:5
+ --> $DIR/unnecessary_cast.rs:56:5
|
LL | false as bool;
| ^^^^^^^^^^^^^ help: try: `false`
error: casting integer literal to `i32` is unnecessary
- --> $DIR/unnecessary_cast.rs:49:5
+ --> $DIR/unnecessary_cast.rs:59:5
|
LL | -1_i32 as i32;
| ^^^^^^^^^^^^^ help: try: `-1_i32`
error: casting integer literal to `i32` is unnecessary
- --> $DIR/unnecessary_cast.rs:50:5
+ --> $DIR/unnecessary_cast.rs:60:5
|
LL | - 1_i32 as i32;
| ^^^^^^^^^^^^^^ help: try: `- 1_i32`
error: casting float literal to `f32` is unnecessary
- --> $DIR/unnecessary_cast.rs:51:5
+ --> $DIR/unnecessary_cast.rs:61:5
|
LL | -1f32 as f32;
| ^^^^^^^^^^^^ help: try: `-1_f32`
error: casting integer literal to `i32` is unnecessary
- --> $DIR/unnecessary_cast.rs:52:5
+ --> $DIR/unnecessary_cast.rs:62:5
|
LL | 1_i32 as i32;
| ^^^^^^^^^^^^ help: try: `1_i32`
error: casting float literal to `f32` is unnecessary
- --> $DIR/unnecessary_cast.rs:53:5
+ --> $DIR/unnecessary_cast.rs:63:5
|
LL | 1_f32 as f32;
| ^^^^^^^^^^^^ help: try: `1_f32`
error: casting raw pointers to the same type and constness is unnecessary (`*const u8` -> `*const u8`)
- --> $DIR/unnecessary_cast.rs:55:22
+ --> $DIR/unnecessary_cast.rs:65:22
|
LL | let _: *mut u8 = [1u8, 2].as_ptr() as *const u8 as *mut u8;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `[1u8, 2].as_ptr()`
error: casting raw pointers to the same type and constness is unnecessary (`*const u8` -> `*const u8`)
- --> $DIR/unnecessary_cast.rs:57:5
+ --> $DIR/unnecessary_cast.rs:67:5
|
LL | [1u8, 2].as_ptr() as *const u8;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `[1u8, 2].as_ptr()`
error: casting raw pointers to the same type and constness is unnecessary (`*mut u8` -> `*mut u8`)
- --> $DIR/unnecessary_cast.rs:59:5
+ --> $DIR/unnecessary_cast.rs:69:5
|
LL | [1u8, 2].as_mut_ptr() as *mut u8;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `[1u8, 2].as_mut_ptr()`
error: casting raw pointers to the same type and constness is unnecessary (`*const u32` -> `*const u32`)
- --> $DIR/unnecessary_cast.rs:70:5
+ --> $DIR/unnecessary_cast.rs:80:5
|
LL | owo::<u32>([1u32].as_ptr()) as *const u32;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `owo::<u32>([1u32].as_ptr())`
error: casting raw pointers to the same type and constness is unnecessary (`*const u8` -> `*const u8`)
- --> $DIR/unnecessary_cast.rs:71:5
+ --> $DIR/unnecessary_cast.rs:81:5
|
LL | uwu::<u32, u8>([1u32].as_ptr()) as *const u8;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `uwu::<u32, u8>([1u32].as_ptr())`
error: casting raw pointers to the same type and constness is unnecessary (`*const u32` -> `*const u32`)
- --> $DIR/unnecessary_cast.rs:73:5
+ --> $DIR/unnecessary_cast.rs:83:5
|
LL | uwu::<u32, u32>([1u32].as_ptr()) as *const u32;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `uwu::<u32, u32>([1u32].as_ptr())`
+error: casting to the same type is unnecessary (`u32` -> `u32`)
+ --> $DIR/unnecessary_cast.rs:118:5
+ |
+LL | aaa() as u32;
+ | ^^^^^^^^^^^^ help: try: `aaa()`
+
+error: casting to the same type is unnecessary (`u32` -> `u32`)
+ --> $DIR/unnecessary_cast.rs:120:5
+ |
+LL | aaa() as u32;
+ | ^^^^^^^^^^^^ help: try: `aaa()`
+
error: casting integer literal to `f32` is unnecessary
- --> $DIR/unnecessary_cast.rs:139:9
+ --> $DIR/unnecessary_cast.rs:156:9
|
LL | 100 as f32;
| ^^^^^^^^^^ help: try: `100_f32`
error: casting integer literal to `f64` is unnecessary
- --> $DIR/unnecessary_cast.rs:140:9
+ --> $DIR/unnecessary_cast.rs:157:9
|
LL | 100 as f64;
| ^^^^^^^^^^ help: try: `100_f64`
error: casting integer literal to `f64` is unnecessary
- --> $DIR/unnecessary_cast.rs:141:9
+ --> $DIR/unnecessary_cast.rs:158:9
|
LL | 100_i32 as f64;
| ^^^^^^^^^^^^^^ help: try: `100_f64`
error: casting integer literal to `f32` is unnecessary
- --> $DIR/unnecessary_cast.rs:142:17
+ --> $DIR/unnecessary_cast.rs:159:17
|
LL | let _ = -100 as f32;
| ^^^^^^^^^^^ help: try: `-100_f32`
error: casting integer literal to `f64` is unnecessary
- --> $DIR/unnecessary_cast.rs:143:17
+ --> $DIR/unnecessary_cast.rs:160:17
|
LL | let _ = -100 as f64;
| ^^^^^^^^^^^ help: try: `-100_f64`
error: casting integer literal to `f64` is unnecessary
- --> $DIR/unnecessary_cast.rs:144:17
+ --> $DIR/unnecessary_cast.rs:161:17
|
LL | let _ = -100_i32 as f64;
| ^^^^^^^^^^^^^^^ help: try: `-100_f64`
error: casting float literal to `f32` is unnecessary
- --> $DIR/unnecessary_cast.rs:145:9
+ --> $DIR/unnecessary_cast.rs:162:9
|
LL | 100. as f32;
| ^^^^^^^^^^^ help: try: `100_f32`
error: casting float literal to `f64` is unnecessary
- --> $DIR/unnecessary_cast.rs:146:9
+ --> $DIR/unnecessary_cast.rs:163:9
|
LL | 100. as f64;
| ^^^^^^^^^^^ help: try: `100_f64`
error: casting integer literal to `u32` is unnecessary
- --> $DIR/unnecessary_cast.rs:158:9
+ --> $DIR/unnecessary_cast.rs:175:9
|
LL | 1 as u32;
| ^^^^^^^^ help: try: `1_u32`
error: casting integer literal to `i32` is unnecessary
- --> $DIR/unnecessary_cast.rs:159:9
+ --> $DIR/unnecessary_cast.rs:176:9
|
LL | 0x10 as i32;
| ^^^^^^^^^^^ help: try: `0x10_i32`
error: casting integer literal to `usize` is unnecessary
- --> $DIR/unnecessary_cast.rs:160:9
+ --> $DIR/unnecessary_cast.rs:177:9
|
LL | 0b10 as usize;
| ^^^^^^^^^^^^^ help: try: `0b10_usize`
error: casting integer literal to `u16` is unnecessary
- --> $DIR/unnecessary_cast.rs:161:9
+ --> $DIR/unnecessary_cast.rs:178:9
|
LL | 0o73 as u16;
| ^^^^^^^^^^^ help: try: `0o73_u16`
error: casting integer literal to `u32` is unnecessary
- --> $DIR/unnecessary_cast.rs:162:9
+ --> $DIR/unnecessary_cast.rs:179:9
|
LL | 1_000_000_000 as u32;
| ^^^^^^^^^^^^^^^^^^^^ help: try: `1_000_000_000_u32`
error: casting float literal to `f64` is unnecessary
- --> $DIR/unnecessary_cast.rs:164:9
+ --> $DIR/unnecessary_cast.rs:181:9
|
LL | 1.0 as f64;
| ^^^^^^^^^^ help: try: `1.0_f64`
error: casting float literal to `f32` is unnecessary
- --> $DIR/unnecessary_cast.rs:165:9
+ --> $DIR/unnecessary_cast.rs:182:9
|
LL | 0.5 as f32;
| ^^^^^^^^^^ help: try: `0.5_f32`
error: casting integer literal to `i32` is unnecessary
- --> $DIR/unnecessary_cast.rs:169:17
+ --> $DIR/unnecessary_cast.rs:186:17
|
LL | let _ = -1 as i32;
| ^^^^^^^^^ help: try: `-1_i32`
error: casting float literal to `f32` is unnecessary
- --> $DIR/unnecessary_cast.rs:170:17
+ --> $DIR/unnecessary_cast.rs:187:17
|
LL | let _ = -1.0 as f32;
| ^^^^^^^^^^^ help: try: `-1.0_f32`
error: casting to the same type is unnecessary (`i32` -> `i32`)
- --> $DIR/unnecessary_cast.rs:176:18
+ --> $DIR/unnecessary_cast.rs:193:18
|
LL | let _ = &(x as i32);
| ^^^^^^^^^^ help: try: `{ x }`
error: casting integer literal to `i32` is unnecessary
- --> $DIR/unnecessary_cast.rs:182:22
+ --> $DIR/unnecessary_cast.rs:199:22
|
LL | let _: i32 = -(1) as i32;
| ^^^^^^^^^^^ help: try: `-1_i32`
error: casting integer literal to `i64` is unnecessary
- --> $DIR/unnecessary_cast.rs:184:22
+ --> $DIR/unnecessary_cast.rs:201:22
|
LL | let _: i64 = -(1) as i64;
| ^^^^^^^^^^^ help: try: `-1_i64`
error: casting float literal to `f64` is unnecessary
- --> $DIR/unnecessary_cast.rs:191:22
+ --> $DIR/unnecessary_cast.rs:208:22
|
LL | let _: f64 = (-8.0 as f64).exp();
| ^^^^^^^^^^^^^ help: try: `(-8.0_f64)`
error: casting float literal to `f64` is unnecessary
- --> $DIR/unnecessary_cast.rs:193:23
+ --> $DIR/unnecessary_cast.rs:210:23
|
LL | let _: f64 = -(8.0 as f64).exp(); // should suggest `-8.0_f64.exp()` here not to change code behavior
| ^^^^^^^^^^^^ help: try: `8.0_f64`
error: casting to the same type is unnecessary (`f32` -> `f32`)
- --> $DIR/unnecessary_cast.rs:201:20
+ --> $DIR/unnecessary_cast.rs:218:20
|
LL | let _num = foo() as f32;
| ^^^^^^^^^^^^ help: try: `foo()`
-error: aborting due to 38 previous errors
+error: aborting due to 40 previous errors
diff --git a/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.rs b/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.rs
new file mode 100644
index 000000000..0e027f604
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.rs
@@ -0,0 +1,22 @@
+#![warn(clippy::unnecessary_cast)]
+
+fn main() {
+ let _ = std::ptr::null() as *const u8;
+}
+
+mod issue11113 {
+ #[repr(C)]
+ struct Vtbl {
+ query: unsafe extern "system" fn(),
+ }
+
+ struct TearOff {
+ object: *mut std::ffi::c_void,
+ }
+
+ impl TearOff {
+ unsafe fn query(&self) {
+ ((*(*(self.object as *mut *mut _) as *mut Vtbl)).query)()
+ }
+ }
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.stderr b/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.stderr
new file mode 100644
index 000000000..eecf24568
--- /dev/null
+++ b/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.stderr
@@ -0,0 +1,16 @@
+error: casting raw pointers to the same type and constness is unnecessary (`*const u8` -> `*const u8`)
+ --> $DIR/unnecessary_cast_unfixable.rs:4:13
+ |
+LL | let _ = std::ptr::null() as *const u8;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::null()`
+ |
+ = note: `-D clippy::unnecessary-cast` implied by `-D warnings`
+
+error: casting raw pointers to the same type and constness is unnecessary (`*mut issue11113::Vtbl` -> `*mut issue11113::Vtbl`)
+ --> $DIR/unnecessary_cast_unfixable.rs:19:16
+ |
+LL | ((*(*(self.object as *mut *mut _) as *mut Vtbl)).query)()
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `*(self.object as *mut *mut _)`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/tools/clippy/tests/ui/unnecessary_clone.stderr b/src/tools/clippy/tests/ui/unnecessary_clone.stderr
index 5686ab6b4..23639f6d4 100644
--- a/src/tools/clippy/tests/ui/unnecessary_clone.stderr
+++ b/src/tools/clippy/tests/ui/unnecessary_clone.stderr
@@ -2,7 +2,7 @@ error: using `.clone()` on a ref-counted pointer
--> $DIR/unnecessary_clone.rs:23:5
|
LL | rc.clone();
- | ^^^^^^^^^^ help: try this: `Rc::<bool>::clone(&rc)`
+ | ^^^^^^^^^^ help: try: `Rc::<bool>::clone(&rc)`
|
= note: `-D clippy::clone-on-ref-ptr` implied by `-D warnings`
@@ -10,25 +10,25 @@ error: using `.clone()` on a ref-counted pointer
--> $DIR/unnecessary_clone.rs:26:5
|
LL | arc.clone();
- | ^^^^^^^^^^^ help: try this: `Arc::<bool>::clone(&arc)`
+ | ^^^^^^^^^^^ help: try: `Arc::<bool>::clone(&arc)`
error: using `.clone()` on a ref-counted pointer
--> $DIR/unnecessary_clone.rs:29:5
|
LL | rcweak.clone();
- | ^^^^^^^^^^^^^^ help: try this: `Weak::<bool>::clone(&rcweak)`
+ | ^^^^^^^^^^^^^^ help: try: `Weak::<bool>::clone(&rcweak)`
error: using `.clone()` on a ref-counted pointer
--> $DIR/unnecessary_clone.rs:32:5
|
LL | arc_weak.clone();
- | ^^^^^^^^^^^^^^^^ help: try this: `Weak::<bool>::clone(&arc_weak)`
+ | ^^^^^^^^^^^^^^^^ help: try: `Weak::<bool>::clone(&arc_weak)`
error: using `.clone()` on a ref-counted pointer
--> $DIR/unnecessary_clone.rs:36:33
|
LL | let _: Arc<dyn SomeTrait> = x.clone();
- | ^^^^^^^^^ help: try this: `Arc::<SomeImpl>::clone(&x)`
+ | ^^^^^^^^^ help: try: `Arc::<SomeImpl>::clone(&x)`
error: using `clone` on type `T` which implements the `Copy` trait
--> $DIR/unnecessary_clone.rs:40:5
@@ -54,7 +54,7 @@ error: using `.clone()` on a ref-counted pointer
--> $DIR/unnecessary_clone.rs:95:14
|
LL | Some(try_opt!(Some(rc)).clone())
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `Rc::<u8>::clone(&try_opt!(Some(rc)))`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Rc::<u8>::clone(&try_opt!(Some(rc)))`
error: aborting due to 9 previous errors
diff --git a/src/tools/clippy/tests/ui/unnecessary_filter_map.rs b/src/tools/clippy/tests/ui/unnecessary_filter_map.rs
index 8e01c2674..3c8c6ec94 100644
--- a/src/tools/clippy/tests/ui/unnecessary_filter_map.rs
+++ b/src/tools/clippy/tests/ui/unnecessary_filter_map.rs
@@ -148,3 +148,9 @@ mod comment_1052978898 {
})
}
}
+
+fn issue11260() {
+ // #11260 is about unnecessary_find_map, but the fix also kind of applies to
+ // unnecessary_filter_map
+ let _x = std::iter::once(1).filter_map(|n| (n > 1).then_some(n));
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_filter_map.stderr b/src/tools/clippy/tests/ui/unnecessary_filter_map.stderr
index 5585b10ab..2d5403ce3 100644
--- a/src/tools/clippy/tests/ui/unnecessary_filter_map.stderr
+++ b/src/tools/clippy/tests/ui/unnecessary_filter_map.stderr
@@ -34,5 +34,11 @@ error: this `.filter_map` can be written more simply using `.map`
LL | let _ = (0..4).filter_map(|x| Some(x + 1));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: aborting due to 4 previous errors
+error: this `.filter_map` can be written more simply using `.filter`
+ --> $DIR/unnecessary_filter_map.rs:155:14
+ |
+LL | let _x = std::iter::once(1).filter_map(|n| (n > 1).then_some(n));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 5 previous errors
diff --git a/src/tools/clippy/tests/ui/unnecessary_find_map.rs b/src/tools/clippy/tests/ui/unnecessary_find_map.rs
index a52390861..2c228fbbc 100644
--- a/src/tools/clippy/tests/ui/unnecessary_find_map.rs
+++ b/src/tools/clippy/tests/ui/unnecessary_find_map.rs
@@ -21,3 +21,9 @@ fn main() {
fn find_map_none_changes_item_type() -> Option<bool> {
"".chars().find_map(|_| None)
}
+
+fn issue11260() {
+ let y = Some(1);
+ let _x = std::iter::once(1).find_map(|n| (n > 1).then_some(n));
+ let _x = std::iter::once(1).find_map(|n| (n > 1).then_some(y)); // different option, so can't be just `.find()`
+}
diff --git a/src/tools/clippy/tests/ui/unnecessary_find_map.stderr b/src/tools/clippy/tests/ui/unnecessary_find_map.stderr
index fb33c122f..3a995b41b 100644
--- a/src/tools/clippy/tests/ui/unnecessary_find_map.stderr
+++ b/src/tools/clippy/tests/ui/unnecessary_find_map.stderr
@@ -34,5 +34,11 @@ error: this `.find_map` can be written more simply using `.map(..).next()`
LL | let _ = (0..4).find_map(|x| Some(x + 1));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: aborting due to 4 previous errors
+error: this `.find_map` can be written more simply using `.find`
+ --> $DIR/unnecessary_find_map.rs:27:14
+ |
+LL | let _x = std::iter::once(1).find_map(|n| (n > 1).then_some(n));
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 5 previous errors
diff --git a/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.fixed b/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.fixed
index 44530d8b1..72d52c623 100644
--- a/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.fixed
+++ b/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.fixed
@@ -79,6 +79,26 @@ fn unwrap_methods_result() {
1;
}
+fn unwrap_from_binding() {
+ macro_rules! from_macro {
+ () => {
+ Some("")
+ };
+ }
+ let val = from_macro!();
+ let _ = val.unwrap_or("");
+}
+
+fn unwrap_unchecked() {
+ let _ = 1;
+ let _ = unsafe { 1 + *(&1 as *const i32) }; // needs to keep the unsafe block
+ let _ = 1 + 1;
+ let _ = 1;
+ let _ = unsafe { 1 + *(&1 as *const i32) };
+ let _ = 1 + 1;
+ let _ = 123;
+}
+
fn main() {
unwrap_option_some();
unwrap_option_none();
@@ -86,4 +106,5 @@ fn main() {
unwrap_result_err();
unwrap_methods_option();
unwrap_methods_result();
+ unwrap_unchecked();
}
diff --git a/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.rs b/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.rs
index b43e4d3a3..7d713ea20 100644
--- a/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.rs
+++ b/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.rs
@@ -79,6 +79,26 @@ fn unwrap_methods_result() {
Ok::<_, ()>(1).unwrap_or_else(|_| 2);
}
+fn unwrap_from_binding() {
+ macro_rules! from_macro {
+ () => {
+ Some("")
+ };
+ }
+ let val = from_macro!();
+ let _ = val.unwrap_or("");
+}
+
+fn unwrap_unchecked() {
+ let _ = unsafe { Some(1).unwrap_unchecked() };
+ let _ = unsafe { Some(1).unwrap_unchecked() + *(&1 as *const i32) }; // needs to keep the unsafe block
+ let _ = unsafe { Some(1).unwrap_unchecked() } + 1;
+ let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() };
+ let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() + *(&1 as *const i32) };
+ let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() } + 1;
+ let _ = unsafe { Err::<(), i32>(123).unwrap_err_unchecked() };
+}
+
fn main() {
unwrap_option_some();
unwrap_option_none();
@@ -86,4 +106,5 @@ fn main() {
unwrap_result_err();
unwrap_methods_option();
unwrap_methods_result();
+ unwrap_unchecked();
}
diff --git a/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.stderr b/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.stderr
index 905384bc8..7f603d6ef 100644
--- a/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.stderr
+++ b/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.stderr
@@ -517,5 +517,89 @@ LL - Ok::<_, ()>(1).unwrap_or_else(|_| 2);
LL + 1;
|
-error: aborting due to 46 previous errors
+error: used `unwrap_unchecked()` on `Some` value
+ --> $DIR/unnecessary_literal_unwrap.rs:93:22
+ |
+LL | let _ = unsafe { Some(1).unwrap_unchecked() };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: remove the `Some` and `unwrap_unchecked()`
+ |
+LL - let _ = unsafe { Some(1).unwrap_unchecked() };
+LL + let _ = 1;
+ |
+
+error: used `unwrap_unchecked()` on `Some` value
+ --> $DIR/unnecessary_literal_unwrap.rs:94:22
+ |
+LL | let _ = unsafe { Some(1).unwrap_unchecked() + *(&1 as *const i32) }; // needs to keep the unsafe block
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: remove the `Some` and `unwrap_unchecked()`
+ |
+LL - let _ = unsafe { Some(1).unwrap_unchecked() + *(&1 as *const i32) }; // needs to keep the unsafe block
+LL + let _ = unsafe { 1 + *(&1 as *const i32) }; // needs to keep the unsafe block
+ |
+
+error: used `unwrap_unchecked()` on `Some` value
+ --> $DIR/unnecessary_literal_unwrap.rs:95:22
+ |
+LL | let _ = unsafe { Some(1).unwrap_unchecked() } + 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: remove the `Some` and `unwrap_unchecked()`
+ |
+LL - let _ = unsafe { Some(1).unwrap_unchecked() } + 1;
+LL + let _ = 1 + 1;
+ |
+
+error: used `unwrap_unchecked()` on `Ok` value
+ --> $DIR/unnecessary_literal_unwrap.rs:96:22
+ |
+LL | let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: remove the `Ok` and `unwrap_unchecked()`
+ |
+LL - let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() };
+LL + let _ = 1;
+ |
+
+error: used `unwrap_unchecked()` on `Ok` value
+ --> $DIR/unnecessary_literal_unwrap.rs:97:22
+ |
+LL | let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() + *(&1 as *const i32) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: remove the `Ok` and `unwrap_unchecked()`
+ |
+LL - let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() + *(&1 as *const i32) };
+LL + let _ = unsafe { 1 + *(&1 as *const i32) };
+ |
+
+error: used `unwrap_unchecked()` on `Ok` value
+ --> $DIR/unnecessary_literal_unwrap.rs:98:22
+ |
+LL | let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() } + 1;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: remove the `Ok` and `unwrap_unchecked()`
+ |
+LL - let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() } + 1;
+LL + let _ = 1 + 1;
+ |
+
+error: used `unwrap_err_unchecked()` on `Err` value
+ --> $DIR/unnecessary_literal_unwrap.rs:99:22
+ |
+LL | let _ = unsafe { Err::<(), i32>(123).unwrap_err_unchecked() };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+help: remove the `Err` and `unwrap_err_unchecked()`
+ |
+LL - let _ = unsafe { Err::<(), i32>(123).unwrap_err_unchecked() };
+LL + let _ = 123;
+ |
+
+error: aborting due to 53 previous errors
diff --git a/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed b/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed
index 592a53f3a..cb7562351 100644
--- a/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed
+++ b/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed
@@ -477,7 +477,8 @@ mod issue_10021 {
mod issue_10033 {
#![allow(dead_code)]
- use std::{fmt::Display, ops::Deref};
+ use std::fmt::Display;
+ use std::ops::Deref;
fn _main() {
let f = Foo;
diff --git a/src/tools/clippy/tests/ui/unnecessary_to_owned.rs b/src/tools/clippy/tests/ui/unnecessary_to_owned.rs
index f2e48b1c4..f82ddb2d2 100644
--- a/src/tools/clippy/tests/ui/unnecessary_to_owned.rs
+++ b/src/tools/clippy/tests/ui/unnecessary_to_owned.rs
@@ -477,7 +477,8 @@ mod issue_10021 {
mod issue_10033 {
#![allow(dead_code)]
- use std::{fmt::Display, ops::Deref};
+ use std::fmt::Display;
+ use std::ops::Deref;
fn _main() {
let f = Foo;
diff --git a/src/tools/clippy/tests/ui/unsafe_removed_from_name.rs b/src/tools/clippy/tests/ui/unsafe_removed_from_name.rs
index d29888ac6..04f6ef29a 100644
--- a/src/tools/clippy/tests/ui/unsafe_removed_from_name.rs
+++ b/src/tools/clippy/tests/ui/unsafe_removed_from_name.rs
@@ -8,9 +8,13 @@ use std::cell::UnsafeCell as TotallySafeCellAgain;
// Shouldn't error
use std::cell::RefCell as ProbablyNotUnsafe;
+
use std::cell::RefCell as RefCellThatCantBeUnsafe;
+
use std::cell::UnsafeCell as SuperDangerousUnsafeCell;
+
use std::cell::UnsafeCell as Dangerunsafe;
+
use std::cell::UnsafeCell as Bombsawayunsafe;
mod mod_with_some_unsafe_things {
@@ -20,8 +24,12 @@ mod mod_with_some_unsafe_things {
use mod_with_some_unsafe_things::Unsafe as LieAboutModSafety;
+// merged imports
+use mod_with_some_unsafe_things::{Unsafe as A, Unsafe as B};
+
// Shouldn't error
use mod_with_some_unsafe_things::Safe as IPromiseItsSafeThisTime;
+
use mod_with_some_unsafe_things::Unsafe as SuperUnsafeModThing;
#[allow(clippy::unsafe_removed_from_name)]
diff --git a/src/tools/clippy/tests/ui/unsafe_removed_from_name.stderr b/src/tools/clippy/tests/ui/unsafe_removed_from_name.stderr
index 4f871cbe4..090d917bd 100644
--- a/src/tools/clippy/tests/ui/unsafe_removed_from_name.stderr
+++ b/src/tools/clippy/tests/ui/unsafe_removed_from_name.stderr
@@ -13,10 +13,22 @@ LL | use std::cell::UnsafeCell as TotallySafeCellAgain;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: removed `unsafe` from the name of `Unsafe` in use as `LieAboutModSafety`
- --> $DIR/unsafe_removed_from_name.rs:21:1
+ --> $DIR/unsafe_removed_from_name.rs:25:1
|
LL | use mod_with_some_unsafe_things::Unsafe as LieAboutModSafety;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: aborting due to 3 previous errors
+error: removed `unsafe` from the name of `Unsafe` in use as `A`
+ --> $DIR/unsafe_removed_from_name.rs:28:1
+ |
+LL | use mod_with_some_unsafe_things::{Unsafe as A, Unsafe as B};
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: removed `unsafe` from the name of `Unsafe` in use as `B`
+ --> $DIR/unsafe_removed_from_name.rs:28:1
+ |
+LL | use mod_with_some_unsafe_things::{Unsafe as A, Unsafe as B};
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 5 previous errors
diff --git a/src/tools/clippy/tests/ui/unused_async.rs b/src/tools/clippy/tests/ui/unused_async.rs
index 69e46ab47..1d188025e 100644
--- a/src/tools/clippy/tests/ui/unused_async.rs
+++ b/src/tools/clippy/tests/ui/unused_async.rs
@@ -37,6 +37,23 @@ mod issue10459 {
}
}
+mod issue9695 {
+ use std::future::Future;
+
+ async fn f() {}
+ async fn f2() {}
+ async fn f3() {}
+
+ fn needs_async_fn<F: Future<Output = ()>>(_: fn() -> F) {}
+
+ fn test() {
+ let x = f;
+ needs_async_fn(x); // async needed in f
+ needs_async_fn(f2); // async needed in f2
+ f3(); // async not needed in f3
+ }
+}
+
async fn foo() -> i32 {
4
}
diff --git a/src/tools/clippy/tests/ui/unused_async.stderr b/src/tools/clippy/tests/ui/unused_async.stderr
index ffae8366b..8d9b72c48 100644
--- a/src/tools/clippy/tests/ui/unused_async.stderr
+++ b/src/tools/clippy/tests/ui/unused_async.stderr
@@ -17,7 +17,15 @@ LL | ready(()).await;
= note: `-D clippy::unused-async` implied by `-D warnings`
error: unused `async` for function with no await statements
- --> $DIR/unused_async.rs:40:1
+ --> $DIR/unused_async.rs:45:5
+ |
+LL | async fn f3() {}
+ | ^^^^^^^^^^^^^^^^
+ |
+ = help: consider removing the `async` from this function
+
+error: unused `async` for function with no await statements
+ --> $DIR/unused_async.rs:57:1
|
LL | / async fn foo() -> i32 {
LL | | 4
@@ -27,7 +35,7 @@ LL | | }
= help: consider removing the `async` from this function
error: unused `async` for function with no await statements
- --> $DIR/unused_async.rs:51:5
+ --> $DIR/unused_async.rs:68:5
|
LL | / async fn unused(&self) -> i32 {
LL | | 1
@@ -36,5 +44,5 @@ LL | | }
|
= help: consider removing the `async` from this function
-error: aborting due to 3 previous errors
+error: aborting due to 4 previous errors
diff --git a/src/tools/clippy/tests/ui/unused_io_amount.rs b/src/tools/clippy/tests/ui/unused_io_amount.rs
index 8d3e094b7..e9d1eeb31 100644
--- a/src/tools/clippy/tests/ui/unused_io_amount.rs
+++ b/src/tools/clippy/tests/ui/unused_io_amount.rs
@@ -1,4 +1,4 @@
-#![allow(dead_code)]
+#![allow(dead_code, clippy::needless_pass_by_ref_mut)]
#![warn(clippy::unused_io_amount)]
extern crate futures;
diff --git a/src/tools/clippy/tests/ui/unused_peekable.rs b/src/tools/clippy/tests/ui/unused_peekable.rs
index 7374dfdf9..b227f8660 100644
--- a/src/tools/clippy/tests/ui/unused_peekable.rs
+++ b/src/tools/clippy/tests/ui/unused_peekable.rs
@@ -1,8 +1,7 @@
#![warn(clippy::unused_peekable)]
#![allow(clippy::no_effect)]
-use std::iter::Empty;
-use std::iter::Peekable;
+use std::iter::{Empty, Peekable};
fn main() {
invalid();
diff --git a/src/tools/clippy/tests/ui/unused_peekable.stderr b/src/tools/clippy/tests/ui/unused_peekable.stderr
index 54788f2fa..d969232fd 100644
--- a/src/tools/clippy/tests/ui/unused_peekable.stderr
+++ b/src/tools/clippy/tests/ui/unused_peekable.stderr
@@ -1,5 +1,5 @@
error: `peek` never called on `Peekable` iterator
- --> $DIR/unused_peekable.rs:14:9
+ --> $DIR/unused_peekable.rs:13:9
|
LL | let peekable = std::iter::empty::<u32>().peekable();
| ^^^^^^^^
@@ -8,7 +8,7 @@ LL | let peekable = std::iter::empty::<u32>().peekable();
= note: `-D clippy::unused-peekable` implied by `-D warnings`
error: `peek` never called on `Peekable` iterator
- --> $DIR/unused_peekable.rs:18:9
+ --> $DIR/unused_peekable.rs:17:9
|
LL | let new_local = old_local;
| ^^^^^^^^^
@@ -16,7 +16,7 @@ LL | let new_local = old_local;
= help: consider removing the call to `peekable`
error: `peek` never called on `Peekable` iterator
- --> $DIR/unused_peekable.rs:22:9
+ --> $DIR/unused_peekable.rs:21:9
|
LL | let by_mut_ref = &mut by_mut_ref_test;
| ^^^^^^^^^^
@@ -24,7 +24,7 @@ LL | let by_mut_ref = &mut by_mut_ref_test;
= help: consider removing the call to `peekable`
error: `peek` never called on `Peekable` iterator
- --> $DIR/unused_peekable.rs:29:9
+ --> $DIR/unused_peekable.rs:28:9
|
LL | let peekable_from_fn = returns_peekable();
| ^^^^^^^^^^^^^^^^
@@ -32,7 +32,7 @@ LL | let peekable_from_fn = returns_peekable();
= help: consider removing the call to `peekable`
error: `peek` never called on `Peekable` iterator
- --> $DIR/unused_peekable.rs:32:13
+ --> $DIR/unused_peekable.rs:31:13
|
LL | let mut peekable_using_iterator_method = std::iter::empty::<u32>().peekable();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -40,7 +40,7 @@ LL | let mut peekable_using_iterator_method = std::iter::empty::<u32>().peek
= help: consider removing the call to `peekable`
error: `peek` never called on `Peekable` iterator
- --> $DIR/unused_peekable.rs:37:9
+ --> $DIR/unused_peekable.rs:36:9
|
LL | let passed_along_ref = std::iter::empty::<u32>().peekable();
| ^^^^^^^^^^^^^^^^
@@ -48,7 +48,7 @@ LL | let passed_along_ref = std::iter::empty::<u32>().peekable();
= help: consider removing the call to `peekable`
error: `peek` never called on `Peekable` iterator
- --> $DIR/unused_peekable.rs:42:9
+ --> $DIR/unused_peekable.rs:41:9
|
LL | let _by_ref = by_ref_test.by_ref();
| ^^^^^^^
@@ -56,7 +56,7 @@ LL | let _by_ref = by_ref_test.by_ref();
= help: consider removing the call to `peekable`
error: `peek` never called on `Peekable` iterator
- --> $DIR/unused_peekable.rs:44:13
+ --> $DIR/unused_peekable.rs:43:13
|
LL | let mut peekable_in_for_loop = std::iter::empty::<u32>().peekable();
| ^^^^^^^^^^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/unwrap.stderr b/src/tools/clippy/tests/ui/unwrap.stderr
index 3796d942f..41db819f6 100644
--- a/src/tools/clippy/tests/ui/unwrap.stderr
+++ b/src/tools/clippy/tests/ui/unwrap.stderr
@@ -4,7 +4,8 @@ error: used `unwrap()` on an `Option` value
LL | let _ = opt.unwrap();
| ^^^^^^^^^^^^
|
- = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is `None`, it will panic
+ = help: consider using `expect()` to provide a better panic message
= note: `-D clippy::unwrap-used` implied by `-D warnings`
error: used `unwrap()` on a `Result` value
@@ -13,7 +14,8 @@ error: used `unwrap()` on a `Result` value
LL | let _ = res.unwrap();
| ^^^^^^^^^^^^
|
- = help: if you don't want to handle the `Err` case gracefully, consider using `expect()` to provide a better panic message
+ = note: if this value is an `Err`, it will panic
+ = help: consider using `expect()` to provide a better panic message
error: used `unwrap_err()` on a `Result` value
--> $DIR/unwrap.rs:12:13
@@ -21,7 +23,8 @@ error: used `unwrap_err()` on a `Result` value
LL | let _ = res.unwrap_err();
| ^^^^^^^^^^^^^^^^
|
- = help: if you don't want to handle the `Ok` case gracefully, consider using `expect_err()` to provide a better panic message
+ = note: if this value is an `Ok`, it will panic
+ = help: consider using `expect_err()` to provide a better panic message
error: aborting due to 3 previous errors
diff --git a/src/tools/clippy/tests/ui/unwrap_expect_used.rs b/src/tools/clippy/tests/ui/unwrap_expect_used.rs
index 7f57efc53..26f92ccde 100644
--- a/src/tools/clippy/tests/ui/unwrap_expect_used.rs
+++ b/src/tools/clippy/tests/ui/unwrap_expect_used.rs
@@ -1,5 +1,8 @@
#![warn(clippy::unwrap_used, clippy::expect_used)]
#![allow(clippy::unnecessary_literal_unwrap)]
+#![feature(never_type)]
+
+use std::convert::Infallible;
trait OptionExt {
type Item;
@@ -28,6 +31,14 @@ fn main() {
Some(3).unwrap_err();
Some(3).expect_err("Hellow none!");
+ // Issue #11245: The `Err` variant can never be constructed so do not lint this.
+ let x: Result<(), !> = Ok(());
+ x.unwrap();
+ x.expect("is `!` (never)");
+ let x: Result<(), Infallible> = Ok(());
+ x.unwrap();
+ x.expect("is never-like (0 variants)");
+
let a: Result<i32, i32> = Ok(3);
a.unwrap();
a.expect("Hello world!");
diff --git a/src/tools/clippy/tests/ui/unwrap_expect_used.stderr b/src/tools/clippy/tests/ui/unwrap_expect_used.stderr
index 1a551ab5a..f66e47612 100644
--- a/src/tools/clippy/tests/ui/unwrap_expect_used.stderr
+++ b/src/tools/clippy/tests/ui/unwrap_expect_used.stderr
@@ -1,52 +1,52 @@
error: used `unwrap()` on an `Option` value
- --> $DIR/unwrap_expect_used.rs:24:5
+ --> $DIR/unwrap_expect_used.rs:27:5
|
LL | Some(3).unwrap();
| ^^^^^^^^^^^^^^^^
|
- = help: if this value is `None`, it will panic
+ = note: if this value is `None`, it will panic
= note: `-D clippy::unwrap-used` implied by `-D warnings`
error: used `expect()` on an `Option` value
- --> $DIR/unwrap_expect_used.rs:25:5
+ --> $DIR/unwrap_expect_used.rs:28:5
|
LL | Some(3).expect("Hello world!");
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if this value is `None`, it will panic
+ = note: if this value is `None`, it will panic
= note: `-D clippy::expect-used` implied by `-D warnings`
error: used `unwrap()` on a `Result` value
- --> $DIR/unwrap_expect_used.rs:32:5
+ --> $DIR/unwrap_expect_used.rs:43:5
|
LL | a.unwrap();
| ^^^^^^^^^^
|
- = help: if this value is an `Err`, it will panic
+ = note: if this value is an `Err`, it will panic
error: used `expect()` on a `Result` value
- --> $DIR/unwrap_expect_used.rs:33:5
+ --> $DIR/unwrap_expect_used.rs:44:5
|
LL | a.expect("Hello world!");
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if this value is an `Err`, it will panic
+ = note: if this value is an `Err`, it will panic
error: used `unwrap_err()` on a `Result` value
- --> $DIR/unwrap_expect_used.rs:34:5
+ --> $DIR/unwrap_expect_used.rs:45:5
|
LL | a.unwrap_err();
| ^^^^^^^^^^^^^^
|
- = help: if this value is an `Ok`, it will panic
+ = note: if this value is an `Ok`, it will panic
error: used `expect_err()` on a `Result` value
- --> $DIR/unwrap_expect_used.rs:35:5
+ --> $DIR/unwrap_expect_used.rs:46:5
|
LL | a.expect_err("Hello error!");
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = help: if this value is an `Ok`, it will panic
+ = note: if this value is an `Ok`, it will panic
error: aborting due to 6 previous errors
diff --git a/src/tools/clippy/tests/ui/unwrap_or.stderr b/src/tools/clippy/tests/ui/unwrap_or.stderr
index cf720eaaf..e384bbbb0 100644
--- a/src/tools/clippy/tests/ui/unwrap_or.stderr
+++ b/src/tools/clippy/tests/ui/unwrap_or.stderr
@@ -2,7 +2,7 @@ error: use of `unwrap_or` followed by a function call
--> $DIR/unwrap_or.rs:5:47
|
LL | let s = Some(String::from("test string")).unwrap_or("Fail".to_string()).len();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| "Fail".to_string())`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| "Fail".to_string())`
|
= note: `-D clippy::or-fun-call` implied by `-D warnings`
@@ -10,7 +10,7 @@ error: use of `unwrap_or` followed by a function call
--> $DIR/unwrap_or.rs:9:47
|
LL | let s = Some(String::from("test string")).unwrap_or("Fail".to_string()).len();
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| "Fail".to_string())`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| "Fail".to_string())`
error: aborting due to 2 previous errors
diff --git a/src/tools/clippy/tests/ui/unwrap_or_else_default.fixed b/src/tools/clippy/tests/ui/unwrap_or_else_default.fixed
index 08b89a18b..acdb96942 100644
--- a/src/tools/clippy/tests/ui/unwrap_or_else_default.fixed
+++ b/src/tools/clippy/tests/ui/unwrap_or_else_default.fixed
@@ -1,10 +1,10 @@
//@run-rustfix
-#![warn(clippy::unwrap_or_else_default)]
+#![warn(clippy::unwrap_or_default)]
#![allow(dead_code)]
#![allow(clippy::unnecessary_wraps, clippy::unnecessary_literal_unwrap)]
-/// Checks implementation of the `UNWRAP_OR_ELSE_DEFAULT` lint.
+/// Checks implementation of the `UNWRAP_OR_DEFAULT` lint.
fn unwrap_or_else_default() {
struct Foo;
@@ -74,4 +74,62 @@ fn unwrap_or_else_default() {
empty_string.unwrap_or_default();
}
+fn type_certainty(option: Option<Vec<u64>>) {
+ option.unwrap_or_default().push(1);
+
+ let option: std::option::Option<std::vec::Vec<u64>> = None;
+ option.unwrap_or_default().push(1);
+
+ let option: Option<Vec<u64>> = None;
+ option.unwrap_or_default().push(1);
+
+ let option = std::option::Option::<std::vec::Vec<u64>>::None;
+ option.unwrap_or_default().push(1);
+
+ let option = Option::<Vec<u64>>::None;
+ option.unwrap_or_default().push(1);
+
+ let option = std::option::Option::None::<std::vec::Vec<u64>>;
+ option.unwrap_or_default().push(1);
+
+ let option = Option::None::<Vec<u64>>;
+ option.unwrap_or_default().push(1);
+
+ let option = None::<Vec<u64>>;
+ option.unwrap_or_default().push(1);
+
+ // should not be changed: type annotation with infer, unconcretized initializer
+ let option: Option<Vec<_>> = None;
+ option.unwrap_or_else(Vec::new).push(1);
+
+ // should not be changed: no type annotation, unconcretized initializer
+ let option = Option::None;
+ option.unwrap_or_else(Vec::new).push(1);
+
+ // should not be changed: no type annotation, unconcretized initializer
+ let option = None;
+ option.unwrap_or_else(Vec::new).push(1);
+
+ type Alias = Option<Vec<u32>>;
+ let option: Alias = Option::<Vec<u32>>::Some(Vec::new());
+ option.unwrap_or_default().push(1);
+}
+
+fn method_call_with_deref() {
+ use std::cell::RefCell;
+ use std::collections::HashMap;
+
+ let cell = RefCell::new(HashMap::<u64, HashMap<u64, String>>::new());
+
+ let mut outer_map = cell.borrow_mut();
+
+ #[allow(unused_assignments)]
+ let mut option = None;
+ option = Some(0);
+
+ let inner_map = outer_map.get_mut(&option.unwrap()).unwrap();
+
+ let _ = inner_map.entry(0).or_default();
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/unwrap_or_else_default.rs b/src/tools/clippy/tests/ui/unwrap_or_else_default.rs
index ad2a74490..55ccd00e1 100644
--- a/src/tools/clippy/tests/ui/unwrap_or_else_default.rs
+++ b/src/tools/clippy/tests/ui/unwrap_or_else_default.rs
@@ -1,10 +1,10 @@
//@run-rustfix
-#![warn(clippy::unwrap_or_else_default)]
+#![warn(clippy::unwrap_or_default)]
#![allow(dead_code)]
#![allow(clippy::unnecessary_wraps, clippy::unnecessary_literal_unwrap)]
-/// Checks implementation of the `UNWRAP_OR_ELSE_DEFAULT` lint.
+/// Checks implementation of the `UNWRAP_OR_DEFAULT` lint.
fn unwrap_or_else_default() {
struct Foo;
@@ -74,4 +74,62 @@ fn unwrap_or_else_default() {
empty_string.unwrap_or_else(|| "".to_string());
}
+fn type_certainty(option: Option<Vec<u64>>) {
+ option.unwrap_or_else(Vec::new).push(1);
+
+ let option: std::option::Option<std::vec::Vec<u64>> = None;
+ option.unwrap_or_else(Vec::new).push(1);
+
+ let option: Option<Vec<u64>> = None;
+ option.unwrap_or_else(Vec::new).push(1);
+
+ let option = std::option::Option::<std::vec::Vec<u64>>::None;
+ option.unwrap_or_else(Vec::new).push(1);
+
+ let option = Option::<Vec<u64>>::None;
+ option.unwrap_or_else(Vec::new).push(1);
+
+ let option = std::option::Option::None::<std::vec::Vec<u64>>;
+ option.unwrap_or_else(Vec::new).push(1);
+
+ let option = Option::None::<Vec<u64>>;
+ option.unwrap_or_else(Vec::new).push(1);
+
+ let option = None::<Vec<u64>>;
+ option.unwrap_or_else(Vec::new).push(1);
+
+ // should not be changed: type annotation with infer, unconcretized initializer
+ let option: Option<Vec<_>> = None;
+ option.unwrap_or_else(Vec::new).push(1);
+
+ // should not be changed: no type annotation, unconcretized initializer
+ let option = Option::None;
+ option.unwrap_or_else(Vec::new).push(1);
+
+ // should not be changed: no type annotation, unconcretized initializer
+ let option = None;
+ option.unwrap_or_else(Vec::new).push(1);
+
+ type Alias = Option<Vec<u32>>;
+ let option: Alias = Option::<Vec<u32>>::Some(Vec::new());
+ option.unwrap_or_else(Vec::new).push(1);
+}
+
+fn method_call_with_deref() {
+ use std::cell::RefCell;
+ use std::collections::HashMap;
+
+ let cell = RefCell::new(HashMap::<u64, HashMap<u64, String>>::new());
+
+ let mut outer_map = cell.borrow_mut();
+
+ #[allow(unused_assignments)]
+ let mut option = None;
+ option = Some(0);
+
+ let inner_map = outer_map.get_mut(&option.unwrap()).unwrap();
+
+ let _ = inner_map.entry(0).or_insert_with(Default::default);
+}
+
fn main() {}
diff --git a/src/tools/clippy/tests/ui/unwrap_or_else_default.stderr b/src/tools/clippy/tests/ui/unwrap_or_else_default.stderr
index d2b921222..af662c6de 100644
--- a/src/tools/clippy/tests/ui/unwrap_or_else_default.stderr
+++ b/src/tools/clippy/tests/ui/unwrap_or_else_default.stderr
@@ -1,40 +1,100 @@
-error: use of `.unwrap_or_else(..)` to construct default value
- --> $DIR/unwrap_or_else_default.rs:48:5
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:48:14
|
LL | with_new.unwrap_or_else(Vec::new);
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `with_new.unwrap_or_default()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
|
- = note: `-D clippy::unwrap-or-else-default` implied by `-D warnings`
+ = note: `-D clippy::unwrap-or-default` implied by `-D warnings`
-error: use of `.unwrap_or_else(..)` to construct default value
- --> $DIR/unwrap_or_else_default.rs:62:5
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:62:23
|
LL | with_real_default.unwrap_or_else(<HasDefaultAndDuplicate as Default>::default);
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `with_real_default.unwrap_or_default()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
-error: use of `.unwrap_or_else(..)` to construct default value
- --> $DIR/unwrap_or_else_default.rs:65:5
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:65:24
|
LL | with_default_trait.unwrap_or_else(Default::default);
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `with_default_trait.unwrap_or_default()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
-error: use of `.unwrap_or_else(..)` to construct default value
- --> $DIR/unwrap_or_else_default.rs:68:5
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:68:23
|
LL | with_default_type.unwrap_or_else(u64::default);
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `with_default_type.unwrap_or_default()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
-error: use of `.unwrap_or_else(..)` to construct default value
- --> $DIR/unwrap_or_else_default.rs:71:5
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:71:23
|
LL | with_default_type.unwrap_or_else(Vec::new);
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `with_default_type.unwrap_or_default()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
-error: use of `.unwrap_or_else(..)` to construct default value
- --> $DIR/unwrap_or_else_default.rs:74:5
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:74:18
|
LL | empty_string.unwrap_or_else(|| "".to_string());
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `empty_string.unwrap_or_default()`
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
-error: aborting due to 6 previous errors
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:78:12
+ |
+LL | option.unwrap_or_else(Vec::new).push(1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
+
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:81:12
+ |
+LL | option.unwrap_or_else(Vec::new).push(1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
+
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:84:12
+ |
+LL | option.unwrap_or_else(Vec::new).push(1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
+
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:87:12
+ |
+LL | option.unwrap_or_else(Vec::new).push(1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
+
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:90:12
+ |
+LL | option.unwrap_or_else(Vec::new).push(1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
+
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:93:12
+ |
+LL | option.unwrap_or_else(Vec::new).push(1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
+
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:96:12
+ |
+LL | option.unwrap_or_else(Vec::new).push(1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
+
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:99:12
+ |
+LL | option.unwrap_or_else(Vec::new).push(1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
+
+error: use of `unwrap_or_else` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:115:12
+ |
+LL | option.unwrap_or_else(Vec::new).push(1);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()`
+
+error: use of `or_insert_with` to construct default value
+ --> $DIR/unwrap_or_else_default.rs:132:32
+ |
+LL | let _ = inner_map.entry(0).or_insert_with(Default::default);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()`
+
+error: aborting due to 16 previous errors
diff --git a/src/tools/clippy/tests/ui/useless_asref.fixed b/src/tools/clippy/tests/ui/useless_asref.fixed
index 490d36ae6..e42731f9b 100644
--- a/src/tools/clippy/tests/ui/useless_asref.fixed
+++ b/src/tools/clippy/tests/ui/useless_asref.fixed
@@ -1,6 +1,10 @@
//@run-rustfix
#![deny(clippy::useless_asref)]
-#![allow(clippy::explicit_auto_deref, clippy::uninlined_format_args)]
+#![allow(
+ clippy::explicit_auto_deref,
+ clippy::uninlined_format_args,
+ clippy::needless_pass_by_ref_mut
+)]
use std::fmt::Debug;
diff --git a/src/tools/clippy/tests/ui/useless_asref.rs b/src/tools/clippy/tests/ui/useless_asref.rs
index f2681af92..50c9990bb 100644
--- a/src/tools/clippy/tests/ui/useless_asref.rs
+++ b/src/tools/clippy/tests/ui/useless_asref.rs
@@ -1,6 +1,10 @@
//@run-rustfix
#![deny(clippy::useless_asref)]
-#![allow(clippy::explicit_auto_deref, clippy::uninlined_format_args)]
+#![allow(
+ clippy::explicit_auto_deref,
+ clippy::uninlined_format_args,
+ clippy::needless_pass_by_ref_mut
+)]
use std::fmt::Debug;
diff --git a/src/tools/clippy/tests/ui/useless_asref.stderr b/src/tools/clippy/tests/ui/useless_asref.stderr
index 67ce8b64e..c97851ac6 100644
--- a/src/tools/clippy/tests/ui/useless_asref.stderr
+++ b/src/tools/clippy/tests/ui/useless_asref.stderr
@@ -1,8 +1,8 @@
error: this call to `as_ref` does nothing
- --> $DIR/useless_asref.rs:43:18
+ --> $DIR/useless_asref.rs:47:18
|
LL | foo_rstr(rstr.as_ref());
- | ^^^^^^^^^^^^^ help: try this: `rstr`
+ | ^^^^^^^^^^^^^ help: try: `rstr`
|
note: the lint level is defined here
--> $DIR/useless_asref.rs:2:9
@@ -11,64 +11,64 @@ LL | #![deny(clippy::useless_asref)]
| ^^^^^^^^^^^^^^^^^^^^^
error: this call to `as_ref` does nothing
- --> $DIR/useless_asref.rs:45:20
+ --> $DIR/useless_asref.rs:49:20
|
LL | foo_rslice(rslice.as_ref());
- | ^^^^^^^^^^^^^^^ help: try this: `rslice`
+ | ^^^^^^^^^^^^^^^ help: try: `rslice`
error: this call to `as_mut` does nothing
- --> $DIR/useless_asref.rs:49:21
+ --> $DIR/useless_asref.rs:53:21
|
LL | foo_mrslice(mrslice.as_mut());
- | ^^^^^^^^^^^^^^^^ help: try this: `mrslice`
+ | ^^^^^^^^^^^^^^^^ help: try: `mrslice`
error: this call to `as_ref` does nothing
- --> $DIR/useless_asref.rs:51:20
+ --> $DIR/useless_asref.rs:55:20
|
LL | foo_rslice(mrslice.as_ref());
- | ^^^^^^^^^^^^^^^^ help: try this: `mrslice`
+ | ^^^^^^^^^^^^^^^^ help: try: `mrslice`
error: this call to `as_ref` does nothing
- --> $DIR/useless_asref.rs:58:20
+ --> $DIR/useless_asref.rs:62:20
|
LL | foo_rslice(rrrrrslice.as_ref());
- | ^^^^^^^^^^^^^^^^^^^ help: try this: `rrrrrslice`
+ | ^^^^^^^^^^^^^^^^^^^ help: try: `rrrrrslice`
error: this call to `as_ref` does nothing
- --> $DIR/useless_asref.rs:60:18
+ --> $DIR/useless_asref.rs:64:18
|
LL | foo_rstr(rrrrrstr.as_ref());
- | ^^^^^^^^^^^^^^^^^ help: try this: `rrrrrstr`
+ | ^^^^^^^^^^^^^^^^^ help: try: `rrrrrstr`
error: this call to `as_mut` does nothing
- --> $DIR/useless_asref.rs:65:21
+ --> $DIR/useless_asref.rs:69:21
|
LL | foo_mrslice(mrrrrrslice.as_mut());
- | ^^^^^^^^^^^^^^^^^^^^ help: try this: `mrrrrrslice`
+ | ^^^^^^^^^^^^^^^^^^^^ help: try: `mrrrrrslice`
error: this call to `as_ref` does nothing
- --> $DIR/useless_asref.rs:67:20
+ --> $DIR/useless_asref.rs:71:20
|
LL | foo_rslice(mrrrrrslice.as_ref());
- | ^^^^^^^^^^^^^^^^^^^^ help: try this: `mrrrrrslice`
+ | ^^^^^^^^^^^^^^^^^^^^ help: try: `mrrrrrslice`
error: this call to `as_ref` does nothing
- --> $DIR/useless_asref.rs:71:16
+ --> $DIR/useless_asref.rs:75:16
|
LL | foo_rrrrmr((&&&&MoreRef).as_ref());
- | ^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(&&&&MoreRef)`
+ | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `(&&&&MoreRef)`
error: this call to `as_mut` does nothing
- --> $DIR/useless_asref.rs:121:13
+ --> $DIR/useless_asref.rs:125:13
|
LL | foo_mrt(mrt.as_mut());
- | ^^^^^^^^^^^^ help: try this: `mrt`
+ | ^^^^^^^^^^^^ help: try: `mrt`
error: this call to `as_ref` does nothing
- --> $DIR/useless_asref.rs:123:12
+ --> $DIR/useless_asref.rs:127:12
|
LL | foo_rt(mrt.as_ref());
- | ^^^^^^^^^^^^ help: try this: `mrt`
+ | ^^^^^^^^^^^^ help: try: `mrt`
error: aborting due to 11 previous errors
diff --git a/src/tools/clippy/tests/ui/vec.fixed b/src/tools/clippy/tests/ui/vec.fixed
index fcdc917c1..7a7d0026f 100644
--- a/src/tools/clippy/tests/ui/vec.fixed
+++ b/src/tools/clippy/tests/ui/vec.fixed
@@ -115,6 +115,46 @@ fn main() {
let _x = vec![1; 201];
}
+fn issue11075() {
+ macro_rules! repro {
+ ($e:expr) => {
+ stringify!($e)
+ };
+ }
+ for _string in [repro!(true), repro!(null)] {
+ unimplemented!();
+ }
+
+ macro_rules! in_macro {
+ ($e:expr, $vec:expr, $vec2:expr) => {{
+ vec![1; 2].fill(3);
+ vec![1, 2].fill(3);
+ for _ in vec![1, 2] {}
+ for _ in vec![1; 2] {}
+ for _ in vec![$e, $e] {}
+ for _ in vec![$e; 2] {}
+ for _ in $vec {}
+ for _ in $vec2 {}
+ }};
+ }
+
+ in_macro!(1, [1, 2], [1; 2]);
+
+ macro_rules! from_macro {
+ () => {
+ vec![1, 2, 3]
+ };
+ }
+ macro_rules! from_macro_repeat {
+ () => {
+ vec![1; 3]
+ };
+ }
+
+ for _ in from_macro!() {}
+ for _ in from_macro_repeat!() {}
+}
+
#[clippy::msrv = "1.53"]
fn above() {
for a in [1, 2, 3] {
diff --git a/src/tools/clippy/tests/ui/vec.rs b/src/tools/clippy/tests/ui/vec.rs
index 0404d8cdb..cbe7685b4 100644
--- a/src/tools/clippy/tests/ui/vec.rs
+++ b/src/tools/clippy/tests/ui/vec.rs
@@ -115,6 +115,46 @@ fn main() {
let _x = vec![1; 201];
}
+fn issue11075() {
+ macro_rules! repro {
+ ($e:expr) => {
+ stringify!($e)
+ };
+ }
+ for _string in vec![repro!(true), repro!(null)] {
+ unimplemented!();
+ }
+
+ macro_rules! in_macro {
+ ($e:expr, $vec:expr, $vec2:expr) => {{
+ vec![1; 2].fill(3);
+ vec![1, 2].fill(3);
+ for _ in vec![1, 2] {}
+ for _ in vec![1; 2] {}
+ for _ in vec![$e, $e] {}
+ for _ in vec![$e; 2] {}
+ for _ in $vec {}
+ for _ in $vec2 {}
+ }};
+ }
+
+ in_macro!(1, vec![1, 2], vec![1; 2]);
+
+ macro_rules! from_macro {
+ () => {
+ vec![1, 2, 3]
+ };
+ }
+ macro_rules! from_macro_repeat {
+ () => {
+ vec![1; 3]
+ };
+ }
+
+ for _ in from_macro!() {}
+ for _ in from_macro_repeat!() {}
+}
+
#[clippy::msrv = "1.53"]
fn above() {
for a in vec![1, 2, 3] {
diff --git a/src/tools/clippy/tests/ui/vec.stderr b/src/tools/clippy/tests/ui/vec.stderr
index 33d565b2d..8f6d2a1df 100644
--- a/src/tools/clippy/tests/ui/vec.stderr
+++ b/src/tools/clippy/tests/ui/vec.stderr
@@ -85,16 +85,34 @@ LL | for _ in vec![1, 2, 3] {}
| ^^^^^^^^^^^^^ help: you can use an array directly: `[1, 2, 3]`
error: useless use of `vec!`
- --> $DIR/vec.rs:120:14
+ --> $DIR/vec.rs:124:20
+ |
+LL | for _string in vec![repro!(true), repro!(null)] {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can use an array directly: `[repro!(true), repro!(null)]`
+
+error: useless use of `vec!`
+ --> $DIR/vec.rs:141:18
+ |
+LL | in_macro!(1, vec![1, 2], vec![1; 2]);
+ | ^^^^^^^^^^ help: you can use an array directly: `[1, 2]`
+
+error: useless use of `vec!`
+ --> $DIR/vec.rs:141:30
+ |
+LL | in_macro!(1, vec![1, 2], vec![1; 2]);
+ | ^^^^^^^^^^ help: you can use an array directly: `[1; 2]`
+
+error: useless use of `vec!`
+ --> $DIR/vec.rs:160:14
|
LL | for a in vec![1, 2, 3] {
| ^^^^^^^^^^^^^ help: you can use an array directly: `[1, 2, 3]`
error: useless use of `vec!`
- --> $DIR/vec.rs:124:14
+ --> $DIR/vec.rs:164:14
|
LL | for a in vec![String::new(), String::new()] {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can use an array directly: `[String::new(), String::new()]`
-error: aborting due to 16 previous errors
+error: aborting due to 19 previous errors
diff --git a/src/tools/clippy/tests/ui/wildcard_enum_match_arm.stderr b/src/tools/clippy/tests/ui/wildcard_enum_match_arm.stderr
index 30d29aa4e..5b88ae4ab 100644
--- a/src/tools/clippy/tests/ui/wildcard_enum_match_arm.stderr
+++ b/src/tools/clippy/tests/ui/wildcard_enum_match_arm.stderr
@@ -2,7 +2,7 @@ error: wildcard match will also match any future added variants
--> $DIR/wildcard_enum_match_arm.rs:40:9
|
LL | _ => eprintln!("Not red"),
- | ^ help: try this: `Color::Green | Color::Blue | Color::Rgb(..) | Color::Cyan`
+ | ^ help: try: `Color::Green | Color::Blue | Color::Rgb(..) | Color::Cyan`
|
note: the lint level is defined here
--> $DIR/wildcard_enum_match_arm.rs:3:9
@@ -14,31 +14,31 @@ error: wildcard match will also match any future added variants
--> $DIR/wildcard_enum_match_arm.rs:44:9
|
LL | _not_red => eprintln!("Not red"),
- | ^^^^^^^^ help: try this: `_not_red @ Color::Green | _not_red @ Color::Blue | _not_red @ Color::Rgb(..) | _not_red @ Color::Cyan`
+ | ^^^^^^^^ help: try: `_not_red @ Color::Green | _not_red @ Color::Blue | _not_red @ Color::Rgb(..) | _not_red @ Color::Cyan`
error: wildcard match will also match any future added variants
--> $DIR/wildcard_enum_match_arm.rs:48:9
|
LL | not_red => format!("{:?}", not_red),
- | ^^^^^^^ help: try this: `not_red @ Color::Green | not_red @ Color::Blue | not_red @ Color::Rgb(..) | not_red @ Color::Cyan`
+ | ^^^^^^^ help: try: `not_red @ Color::Green | not_red @ Color::Blue | not_red @ Color::Rgb(..) | not_red @ Color::Cyan`
error: wildcard match will also match any future added variants
--> $DIR/wildcard_enum_match_arm.rs:64:9
|
LL | _ => "No red",
- | ^ help: try this: `Color::Red | Color::Green | Color::Blue | Color::Rgb(..) | Color::Cyan`
+ | ^ help: try: `Color::Red | Color::Green | Color::Blue | Color::Rgb(..) | Color::Cyan`
error: wildcard matches known variants and will also match future added variants
--> $DIR/wildcard_enum_match_arm.rs:81:9
|
LL | _ => {},
- | ^ help: try this: `ErrorKind::PermissionDenied | _`
+ | ^ help: try: `ErrorKind::PermissionDenied | _`
error: wildcard match will also match any future added variants
--> $DIR/wildcard_enum_match_arm.rs:99:13
|
LL | _ => (),
- | ^ help: try this: `Enum::B | Enum::__Private`
+ | ^ help: try: `Enum::B | Enum::__Private`
error: aborting due to 6 previous errors
diff --git a/src/tools/clippy/tests/ui/wildcard_imports.fixed b/src/tools/clippy/tests/ui/wildcard_imports.fixed
index 2961b062e..67173f406 100644
--- a/src/tools/clippy/tests/ui/wildcard_imports.fixed
+++ b/src/tools/clippy/tests/ui/wildcard_imports.fixed
@@ -112,6 +112,7 @@ mod in_fn_test {
}
fn test_inner_nested() {
+ #[rustfmt::skip]
use self::{inner::inner_foo, inner2::inner_bar};
inner_foo();
diff --git a/src/tools/clippy/tests/ui/wildcard_imports.rs b/src/tools/clippy/tests/ui/wildcard_imports.rs
index 28508a253..8223b6930 100644
--- a/src/tools/clippy/tests/ui/wildcard_imports.rs
+++ b/src/tools/clippy/tests/ui/wildcard_imports.rs
@@ -112,6 +112,7 @@ mod in_fn_test {
}
fn test_inner_nested() {
+ #[rustfmt::skip]
use self::{inner::*, inner2::*};
inner_foo();
diff --git a/src/tools/clippy/tests/ui/wildcard_imports.stderr b/src/tools/clippy/tests/ui/wildcard_imports.stderr
index c96b3041a..f7baf234c 100644
--- a/src/tools/clippy/tests/ui/wildcard_imports.stderr
+++ b/src/tools/clippy/tests/ui/wildcard_imports.stderr
@@ -55,37 +55,37 @@ LL | use wildcard_imports_helper::*;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}`
error: usage of wildcard import
- --> $DIR/wildcard_imports.rs:115:20
+ --> $DIR/wildcard_imports.rs:116:20
|
LL | use self::{inner::*, inner2::*};
| ^^^^^^^^ help: try: `inner::inner_foo`
error: usage of wildcard import
- --> $DIR/wildcard_imports.rs:115:30
+ --> $DIR/wildcard_imports.rs:116:30
|
LL | use self::{inner::*, inner2::*};
| ^^^^^^^^^ help: try: `inner2::inner_bar`
error: usage of wildcard import
- --> $DIR/wildcard_imports.rs:122:13
+ --> $DIR/wildcard_imports.rs:123:13
|
LL | use wildcard_imports_helper::*;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported}`
error: usage of wildcard import
- --> $DIR/wildcard_imports.rs:151:9
+ --> $DIR/wildcard_imports.rs:152:9
|
LL | use crate::in_fn_test::*;
| ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::in_fn_test::{ExportedEnum, ExportedStruct, exported}`
error: usage of wildcard import
- --> $DIR/wildcard_imports.rs:160:9
+ --> $DIR/wildcard_imports.rs:161:9
|
LL | use crate:: in_fn_test:: * ;
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate:: in_fn_test::exported`
error: usage of wildcard import
- --> $DIR/wildcard_imports.rs:161:9
+ --> $DIR/wildcard_imports.rs:162:9
|
LL | use crate:: fn_mod::
| _________^
@@ -93,37 +93,37 @@ LL | | *;
| |_________^ help: try: `crate:: fn_mod::foo`
error: usage of wildcard import
- --> $DIR/wildcard_imports.rs:172:13
+ --> $DIR/wildcard_imports.rs:173:13
|
LL | use super::*;
| ^^^^^^^^ help: try: `super::foofoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports.rs:207:17
+ --> $DIR/wildcard_imports.rs:208:17
|
LL | use super::*;
| ^^^^^^^^ help: try: `super::insidefoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports.rs:215:13
+ --> $DIR/wildcard_imports.rs:216:13
|
LL | use crate::super_imports::*;
| ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate::super_imports::foofoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports.rs:224:17
+ --> $DIR/wildcard_imports.rs:225:17
|
LL | use super::super::*;
| ^^^^^^^^^^^^^^^ help: try: `super::super::foofoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports.rs:233:13
+ --> $DIR/wildcard_imports.rs:234:13
|
LL | use super::super::super_imports::*;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `super::super::super_imports::foofoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports.rs:241:13
+ --> $DIR/wildcard_imports.rs:242:13
|
LL | use super::*;
| ^^^^^^^^ help: try: `super::foofoo`
diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.fixed b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.fixed
index 3aea013fb..8a6337567 100644
--- a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.fixed
+++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.fixed
@@ -106,6 +106,7 @@ mod in_fn_test {
}
fn test_inner_nested() {
+ #[rustfmt::skip]
use self::{inner::inner_foo, inner2::inner_bar};
inner_foo();
diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.stderr b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.stderr
index acca9f651..af9ae6e78 100644
--- a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.stderr
+++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.stderr
@@ -55,37 +55,37 @@ LL | use wildcard_imports_helper::*;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:109:20
+ --> $DIR/wildcard_imports_2021.rs:110:20
|
LL | use self::{inner::*, inner2::*};
| ^^^^^^^^ help: try: `inner::inner_foo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:109:30
+ --> $DIR/wildcard_imports_2021.rs:110:30
|
LL | use self::{inner::*, inner2::*};
| ^^^^^^^^^ help: try: `inner2::inner_bar`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:116:13
+ --> $DIR/wildcard_imports_2021.rs:117:13
|
LL | use wildcard_imports_helper::*;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported}`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:145:9
+ --> $DIR/wildcard_imports_2021.rs:146:9
|
LL | use crate::in_fn_test::*;
| ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::in_fn_test::{ExportedEnum, ExportedStruct, exported}`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:154:9
+ --> $DIR/wildcard_imports_2021.rs:155:9
|
LL | use crate:: in_fn_test:: * ;
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate:: in_fn_test::exported`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:155:9
+ --> $DIR/wildcard_imports_2021.rs:156:9
|
LL | use crate:: fn_mod::
| _________^
@@ -93,37 +93,37 @@ LL | | *;
| |_________^ help: try: `crate:: fn_mod::foo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:166:13
+ --> $DIR/wildcard_imports_2021.rs:167:13
|
LL | use super::*;
| ^^^^^^^^ help: try: `super::foofoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:201:17
+ --> $DIR/wildcard_imports_2021.rs:202:17
|
LL | use super::*;
| ^^^^^^^^ help: try: `super::insidefoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:209:13
+ --> $DIR/wildcard_imports_2021.rs:210:13
|
LL | use crate::super_imports::*;
| ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate::super_imports::foofoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:218:17
+ --> $DIR/wildcard_imports_2021.rs:219:17
|
LL | use super::super::*;
| ^^^^^^^^^^^^^^^ help: try: `super::super::foofoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:227:13
+ --> $DIR/wildcard_imports_2021.rs:228:13
|
LL | use super::super::super_imports::*;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `super::super::super_imports::foofoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:235:13
+ --> $DIR/wildcard_imports_2021.rs:236:13
|
LL | use super::*;
| ^^^^^^^^ help: try: `super::foofoo`
diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.fixed b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.fixed
index 3aea013fb..8a6337567 100644
--- a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.fixed
+++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.fixed
@@ -106,6 +106,7 @@ mod in_fn_test {
}
fn test_inner_nested() {
+ #[rustfmt::skip]
use self::{inner::inner_foo, inner2::inner_bar};
inner_foo();
diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.stderr b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.stderr
index acca9f651..af9ae6e78 100644
--- a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.stderr
+++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.stderr
@@ -55,37 +55,37 @@ LL | use wildcard_imports_helper::*;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:109:20
+ --> $DIR/wildcard_imports_2021.rs:110:20
|
LL | use self::{inner::*, inner2::*};
| ^^^^^^^^ help: try: `inner::inner_foo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:109:30
+ --> $DIR/wildcard_imports_2021.rs:110:30
|
LL | use self::{inner::*, inner2::*};
| ^^^^^^^^^ help: try: `inner2::inner_bar`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:116:13
+ --> $DIR/wildcard_imports_2021.rs:117:13
|
LL | use wildcard_imports_helper::*;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported}`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:145:9
+ --> $DIR/wildcard_imports_2021.rs:146:9
|
LL | use crate::in_fn_test::*;
| ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::in_fn_test::{ExportedEnum, ExportedStruct, exported}`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:154:9
+ --> $DIR/wildcard_imports_2021.rs:155:9
|
LL | use crate:: in_fn_test:: * ;
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate:: in_fn_test::exported`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:155:9
+ --> $DIR/wildcard_imports_2021.rs:156:9
|
LL | use crate:: fn_mod::
| _________^
@@ -93,37 +93,37 @@ LL | | *;
| |_________^ help: try: `crate:: fn_mod::foo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:166:13
+ --> $DIR/wildcard_imports_2021.rs:167:13
|
LL | use super::*;
| ^^^^^^^^ help: try: `super::foofoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:201:17
+ --> $DIR/wildcard_imports_2021.rs:202:17
|
LL | use super::*;
| ^^^^^^^^ help: try: `super::insidefoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:209:13
+ --> $DIR/wildcard_imports_2021.rs:210:13
|
LL | use crate::super_imports::*;
| ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate::super_imports::foofoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:218:17
+ --> $DIR/wildcard_imports_2021.rs:219:17
|
LL | use super::super::*;
| ^^^^^^^^^^^^^^^ help: try: `super::super::foofoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:227:13
+ --> $DIR/wildcard_imports_2021.rs:228:13
|
LL | use super::super::super_imports::*;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `super::super::super_imports::foofoo`
error: usage of wildcard import
- --> $DIR/wildcard_imports_2021.rs:235:13
+ --> $DIR/wildcard_imports_2021.rs:236:13
|
LL | use super::*;
| ^^^^^^^^ help: try: `super::foofoo`
diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.rs b/src/tools/clippy/tests/ui/wildcard_imports_2021.rs
index 40c2d0752..52cd2c828 100644
--- a/src/tools/clippy/tests/ui/wildcard_imports_2021.rs
+++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.rs
@@ -106,6 +106,7 @@ mod in_fn_test {
}
fn test_inner_nested() {
+ #[rustfmt::skip]
use self::{inner::*, inner2::*};
inner_foo();
diff --git a/src/tools/clippy/tests/ui/write_literal.stderr b/src/tools/clippy/tests/ui/write_literal.stderr
index 1e306ae28..8b72c8bd2 100644
--- a/src/tools/clippy/tests/ui/write_literal.stderr
+++ b/src/tools/clippy/tests/ui/write_literal.stderr
@@ -5,7 +5,7 @@ LL | write!(v, "Hello {}", "world");
| ^^^^^^^
|
= note: `-D clippy::write-literal` implied by `-D warnings`
-help: try this
+help: try
|
LL - write!(v, "Hello {}", "world");
LL + write!(v, "Hello world");
@@ -17,7 +17,7 @@ error: literal with an empty format string
LL | writeln!(v, "Hello {} {}", world, "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "Hello {} {}", world, "world");
LL + writeln!(v, "Hello {} world", world);
@@ -29,7 +29,7 @@ error: literal with an empty format string
LL | writeln!(v, "Hello {}", "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "Hello {}", "world");
LL + writeln!(v, "Hello world");
@@ -41,7 +41,7 @@ error: literal with an empty format string
LL | writeln!(v, "{} {:.4}", "a literal", 5);
| ^^^^^^^^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{} {:.4}", "a literal", 5);
LL + writeln!(v, "a literal {:.4}", 5);
@@ -53,7 +53,7 @@ error: literal with an empty format string
LL | writeln!(v, "{0} {1}", "hello", "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{0} {1}", "hello", "world");
LL + writeln!(v, "hello {1}", "world");
@@ -65,7 +65,7 @@ error: literal with an empty format string
LL | writeln!(v, "{0} {1}", "hello", "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{0} {1}", "hello", "world");
LL + writeln!(v, "{0} world", "hello");
@@ -77,7 +77,7 @@ error: literal with an empty format string
LL | writeln!(v, "{1} {0}", "hello", "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{1} {0}", "hello", "world");
LL + writeln!(v, "world {0}", "hello");
@@ -89,7 +89,7 @@ error: literal with an empty format string
LL | writeln!(v, "{1} {0}", "hello", "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{1} {0}", "hello", "world");
LL + writeln!(v, "{1} hello", "world");
@@ -101,7 +101,7 @@ error: literal with an empty format string
LL | writeln!(v, "{foo} {bar}", foo = "hello", bar = "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{foo} {bar}", foo = "hello", bar = "world");
LL + writeln!(v, "hello {bar}", bar = "world");
@@ -113,7 +113,7 @@ error: literal with an empty format string
LL | writeln!(v, "{foo} {bar}", foo = "hello", bar = "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{foo} {bar}", foo = "hello", bar = "world");
LL + writeln!(v, "{foo} world", foo = "hello");
@@ -125,7 +125,7 @@ error: literal with an empty format string
LL | writeln!(v, "{bar} {foo}", foo = "hello", bar = "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{bar} {foo}", foo = "hello", bar = "world");
LL + writeln!(v, "world {foo}", foo = "hello");
@@ -137,7 +137,7 @@ error: literal with an empty format string
LL | writeln!(v, "{bar} {foo}", foo = "hello", bar = "world");
| ^^^^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{bar} {foo}", foo = "hello", bar = "world");
LL + writeln!(v, "{bar} hello", bar = "world");
diff --git a/src/tools/clippy/tests/ui/write_literal_2.stderr b/src/tools/clippy/tests/ui/write_literal_2.stderr
index 18591250a..c30ec385b 100644
--- a/src/tools/clippy/tests/ui/write_literal_2.stderr
+++ b/src/tools/clippy/tests/ui/write_literal_2.stderr
@@ -13,7 +13,7 @@ LL | writeln!(v, "{}", "{hello}");
| ^^^^^^^^^
|
= note: `-D clippy::write-literal` implied by `-D warnings`
-help: try this
+help: try
|
LL - writeln!(v, "{}", "{hello}");
LL + writeln!(v, "{{hello}}");
@@ -25,7 +25,7 @@ error: literal with an empty format string
LL | writeln!(v, r"{}", r"{hello}");
| ^^^^^^^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, r"{}", r"{hello}");
LL + writeln!(v, r"{{hello}}");
@@ -37,7 +37,7 @@ error: literal with an empty format string
LL | writeln!(v, "{}", '\'');
| ^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{}", '\'');
LL + writeln!(v, "'");
@@ -49,7 +49,7 @@ error: literal with an empty format string
LL | writeln!(v, "{}", '"');
| ^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{}", '"');
LL + writeln!(v, "\"");
@@ -67,7 +67,7 @@ error: literal with an empty format string
LL | writeln!(v, r"{}", '\'');
| ^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, r"{}", '\'');
LL + writeln!(v, r"'");
@@ -80,7 +80,7 @@ LL | / "hello \
LL | | world!"
| |_______________^
|
-help: try this
+help: try
|
LL ~ "some hello \
LL ~ world!"
@@ -92,7 +92,7 @@ error: literal with an empty format string
LL | "1", "2", "3",
| ^^^
|
-help: try this
+help: try
|
LL ~ "some 1\
LL ~ {} \\ {}", "2", "3",
@@ -104,7 +104,7 @@ error: literal with an empty format string
LL | "1", "2", "3",
| ^^^
|
-help: try this
+help: try
|
LL ~ 2 \\ {}",
LL ~ "1", "3",
@@ -116,7 +116,7 @@ error: literal with an empty format string
LL | "1", "2", "3",
| ^^^
|
-help: try this
+help: try
|
LL ~ {} \\ 3",
LL ~ "1", "2",
@@ -128,7 +128,7 @@ error: literal with an empty format string
LL | writeln!(v, "{}", "\\");
| ^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{}", "\\");
LL + writeln!(v, "\\");
@@ -140,7 +140,7 @@ error: literal with an empty format string
LL | writeln!(v, r"{}", "\\");
| ^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, r"{}", "\\");
LL + writeln!(v, r"\");
@@ -152,7 +152,7 @@ error: literal with an empty format string
LL | writeln!(v, r#"{}"#, "\\");
| ^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, r#"{}"#, "\\");
LL + writeln!(v, r#"\"#);
@@ -164,7 +164,7 @@ error: literal with an empty format string
LL | writeln!(v, "{}", r"\");
| ^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{}", r"\");
LL + writeln!(v, "\\");
@@ -176,7 +176,7 @@ error: literal with an empty format string
LL | writeln!(v, "{}", "\r");
| ^^^^
|
-help: try this
+help: try
|
LL - writeln!(v, "{}", "\r");
LL + writeln!(v, "\r");
diff --git a/src/tools/clippy/triagebot.toml b/src/tools/clippy/triagebot.toml
index c40b71f6c..6856bb0ab 100644
--- a/src/tools/clippy/triagebot.toml
+++ b/src/tools/clippy/triagebot.toml
@@ -9,6 +9,9 @@ allow-unauthenticated = [
# See https://github.com/rust-lang/triagebot/wiki/Shortcuts
[shortcut]
+# Have rustbot inform users about the *No Merge Policy*
+[no-merges]
+
[autolabel."S-waiting-on-review"]
new_pr = true
@@ -27,4 +30,6 @@ contributing_url = "https://github.com/rust-lang/rust-clippy/blob/master/CONTRIB
"@Alexendoo",
"@dswij",
"@Jarcho",
+ "@blyxyas",
+ "@Centri3",
]
diff --git a/src/tools/clippy/util/fetch_prs_between.sh b/src/tools/clippy/util/fetch_prs_between.sh
index 6865abf97..fa7560b69 100755
--- a/src/tools/clippy/util/fetch_prs_between.sh
+++ b/src/tools/clippy/util/fetch_prs_between.sh
@@ -6,15 +6,20 @@
# If you want to use this to update the Clippy changelog, be sure to manually
# exclude the non-user facing changes like 'rustup' PRs, typo fixes, etc.
-first=$1
-last=$2
+set -e
IFS='
'
-for pr in $(git log --oneline --grep "Merge #" --grep "Merge pull request" --grep "Auto merge of" --grep "Rollup merge of" "$first...$last" | sort -rn | uniq); do
+for pr in $(git log --oneline --merges --first-parent "$1...$2"); do
id=$(echo "$pr" | rg -o '#[0-9]{3,5}' | cut -c 2-)
commit=$(echo "$pr" | cut -d' ' -f 1)
message=$(git --no-pager show --pretty=medium "$commit")
+
+ if [[ -z "$newest_pr" ]]; then
+ newest_pr="$id"
+ fi
+ oldest_pr="$id"
+
if [[ -n $(echo "$message" | rg "^[\s]{4}changelog: [nN]one\.*$") ]]; then
continue
fi
@@ -25,3 +30,14 @@ for pr in $(git log --oneline --grep "Merge #" --grep "Merge pull request" --gre
echo "---------------------------------------------------------"
echo
done
+
+newest_merged_at="$(gh pr view -R rust-lang/rust-clippy --json mergedAt $newest_pr -q .mergedAt)"
+oldest_merged_at="$(gh pr view -R rust-lang/rust-clippy --json mergedAt $oldest_pr -q .mergedAt)"
+
+query="merged:$oldest_merged_at..$newest_merged_at base:master"
+encoded_query="$(echo $query | sed 's/ /+/g; s/:/%3A/g')"
+
+pr_link="https://github.com/rust-lang/rust-clippy/pulls?q=$encoded_query"
+count="$(gh api -X GET search/issues -f "q=$query repo:rust-lang/rust-clippy" -q .total_count)"
+
+echo "[View all $count merged pull requests]($pr_link)"
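For context, the script's new tail builds a GitHub search query (`merged:<oldest>..<newest> base:master`) and percent-encodes spaces and colons with `sed 's/ /+/g; s/:/%3A/g'` before embedding it in a URL. A minimal Rust sketch of that encoding step, using made-up placeholder timestamps, could look like this:

```rust
// Hedged illustration of the query encoding done by fetch_prs_between.sh;
// not part of the repository.
fn encode_github_search_query(query: &str) -> String {
    query.replace(' ', "+").replace(':', "%3A")
}

fn main() {
    // Placeholder timestamps; the script reads the real ones via `gh pr view`.
    let query = "merged:2023-06-01T00:00:00Z..2023-07-01T00:00:00Z base:master";
    let link = format!(
        "https://github.com/rust-lang/rust-clippy/pulls?q={}",
        encode_github_search_query(query)
    );
    println!("{link}");
}
```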
diff --git a/src/tools/collect-license-metadata/src/path_tree.rs b/src/tools/collect-license-metadata/src/path_tree.rs
index 68b6cef64..709d91897 100644
--- a/src/tools/collect-license-metadata/src/path_tree.rs
+++ b/src/tools/collect-license-metadata/src/path_tree.rs
@@ -155,7 +155,10 @@ impl Node<LicenseId> {
name: child_name,
children: child_children,
license: child_license,
- } = child else { continue };
+ } = child
+ else {
+ continue;
+ };
if child_license != license {
continue;
diff --git a/src/tools/compiletest/Cargo.toml b/src/tools/compiletest/Cargo.toml
index d2f258320..ff1d5cecb 100644
--- a/src/tools/compiletest/Cargo.toml
+++ b/src/tools/compiletest/Cargo.toml
@@ -29,7 +29,7 @@ anyhow = "1"
libc = "0.2"
[target.'cfg(windows)'.dependencies]
-miow = "0.5"
+miow = "0.6"
[target.'cfg(windows)'.dependencies.windows]
version = "0.48.0"
diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs
index c835962ad..269d93843 100644
--- a/src/tools/compiletest/src/header.rs
+++ b/src/tools/compiletest/src/header.rs
@@ -6,7 +6,6 @@ use std::io::BufReader;
use std::path::{Path, PathBuf};
use std::process::Command;
-use build_helper::ci::CiEnv;
use tracing::*;
use crate::common::{Config, Debugger, FailMode, Mode, PassMode};
@@ -232,7 +231,7 @@ impl TestProps {
aux_builds: vec![],
aux_crates: vec![],
revisions: vec![],
- rustc_env: vec![],
+ rustc_env: vec![("RUSTC_ICE".to_string(), "0".to_string())],
unset_rustc_env: vec![],
exec_env: vec![],
unset_exec_env: vec![],
@@ -298,13 +297,6 @@ impl TestProps {
/// `//[foo]`), then the property is ignored unless `cfg` is
/// `Some("foo")`.
fn load_from(&mut self, testfile: &Path, cfg: Option<&str>, config: &Config) {
- // In CI, we've sometimes encountered non-determinism related to truncating very long paths.
- // Set a consistent (short) prefix to avoid issues, but only in CI to avoid regressing the
- // contributor experience.
- if CiEnv::is_ci() {
- self.remap_src_base = config.mode == Mode::Ui && !config.suite.contains("rustdoc");
- }
-
let mut has_edition = false;
if !testfile.is_dir() {
let file = File::open(testfile).unwrap();
@@ -541,16 +533,15 @@ impl TestProps {
}
fn update_pass_mode(&mut self, ln: &str, revision: Option<&str>, config: &Config) {
- let check_no_run = |s| {
- if config.mode != Mode::Ui && config.mode != Mode::Incremental {
- panic!("`{}` header is only supported in UI and incremental tests", s);
- }
- if config.mode == Mode::Incremental
- && !revision.map_or(false, |r| r.starts_with("cfail"))
- && !self.revisions.iter().all(|r| r.starts_with("cfail"))
- {
- panic!("`{}` header is only supported in `cfail` incremental tests", s);
+ let check_no_run = |s| match (config.mode, s) {
+ (Mode::Ui, _) => (),
+ (Mode::Codegen, "build-pass") => (),
+ (Mode::Incremental, _) => {
+ if revision.is_some() && !self.revisions.iter().all(|r| r.starts_with("cfail")) {
+ panic!("`{s}` header is only supported in `cfail` incremental tests")
+ }
}
+ (mode, _) => panic!("`{s}` header is not supported in `{mode}` tests"),
};
let pass_mode = if config.parse_name_directive(ln, "check-pass") {
check_no_run("check-pass");
@@ -559,9 +550,7 @@ impl TestProps {
check_no_run("build-pass");
Some(PassMode::Build)
} else if config.parse_name_directive(ln, "run-pass") {
- if config.mode != Mode::Ui {
- panic!("`run-pass` header is only supported in UI tests")
- }
+ check_no_run("run-pass");
Some(PassMode::Run)
} else {
None
@@ -588,21 +577,25 @@ impl TestProps {
}
}
+/// Extracts an `(Option<line_config>, directive)` pair from a line, if a comment is present.
pub fn line_directive<'line>(
comment: &str,
ln: &'line str,
) -> Option<(Option<&'line str>, &'line str)> {
+ let ln = ln.trim_start();
if ln.starts_with(comment) {
let ln = ln[comment.len()..].trim_start();
if ln.starts_with('[') {
// A comment like `//[foo]` is specific to revision `foo`
- if let Some(close_brace) = ln.find(']') {
- let lncfg = &ln[1..close_brace];
+ let Some(close_brace) = ln.find(']') else {
+ panic!(
+ "malformed condition directive: expected `{}[foo]`, found `{}`",
+ comment, ln
+ );
+ };
- Some((Some(lncfg), ln[(close_brace + 1)..].trim_start()))
- } else {
- panic!("malformed condition directive: expected `{}[foo]`, found `{}`", comment, ln)
- }
+ let lncfg = &ln[1..close_brace];
+ Some((Some(lncfg), ln[(close_brace + 1)..].trim_start()))
} else {
Some((None, ln))
}
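The reworked `line_directive` parsing splits revision-specific directives such as `//[cfail1] build-pass` into a `(Some(revision), directive)` pair and plain `// directive` lines into `(None, directive)`. A rough, self-contained sketch of that behaviour (not the compiletest code itself):

```rust
// Minimal re-implementation of the revision-aware directive parsing shown
// above, for illustration only.
fn line_directive<'line>(
    comment: &str,
    ln: &'line str,
) -> Option<(Option<&'line str>, &'line str)> {
    let ln = ln.trim_start();
    if !ln.starts_with(comment) {
        return None;
    }
    let ln = ln[comment.len()..].trim_start();
    if let Some(rest) = ln.strip_prefix('[') {
        // `//[foo] directive` applies only to revision `foo`.
        let close = rest.find(']').expect("malformed condition directive");
        Some((Some(&rest[..close]), rest[close + 1..].trim_start()))
    } else {
        Some((None, ln))
    }
}

fn main() {
    assert_eq!(
        line_directive("//", "//[cfail1] build-pass"),
        Some((Some("cfail1"), "build-pass"))
    );
    assert_eq!(line_directive("//", "// run-pass"), Some((None, "run-pass")));
    assert_eq!(line_directive("//", "fn main() {}"), None);
}
```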
diff --git a/src/tools/compiletest/src/header/cfg.rs b/src/tools/compiletest/src/header/cfg.rs
index 86a749b93..77c2866b3 100644
--- a/src/tools/compiletest/src/header/cfg.rs
+++ b/src/tools/compiletest/src/header/cfg.rs
@@ -112,7 +112,7 @@ pub(super) fn parse_cfg_name_directive<'a>(
(config.target == "wasm32-unknown-unknown").then_some("emscripten"),
],
allowed_names: &target_cfgs.all_oses,
- message: "when the operative system is {name}"
+ message: "when the operating system is {name}"
}
condition! {
name: &target_cfg.env,
@@ -122,7 +122,7 @@ pub(super) fn parse_cfg_name_directive<'a>(
condition! {
name: &target_cfg.os_and_env(),
allowed_names: &target_cfgs.all_oses_and_envs,
- message: "when the operative system and target environment are {name}"
+ message: "when the operating system and target environment are {name}"
}
condition! {
name: &target_cfg.abi,
diff --git a/src/tools/compiletest/src/lib.rs b/src/tools/compiletest/src/lib.rs
index fc48d0159..1a765477f 100644
--- a/src/tools/compiletest/src/lib.rs
+++ b/src/tools/compiletest/src/lib.rs
@@ -1119,7 +1119,7 @@ fn check_overlapping_tests(found_paths: &BTreeSet<PathBuf>) {
for path in found_paths {
for ancestor in path.ancestors().skip(1) {
if found_paths.contains(ancestor) {
- collisions.push((path, ancestor.clone()));
+ collisions.push((path, ancestor));
}
}
}
diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs
index 672779325..4ef79af31 100644
--- a/src/tools/compiletest/src/runtest.rs
+++ b/src/tools/compiletest/src/runtest.rs
@@ -18,6 +18,7 @@ use crate::ColorConfig;
use regex::{Captures, Regex};
use rustfix::{apply_suggestions, get_suggestions_from_json, Filter};
+use std::borrow::Cow;
use std::collections::hash_map::DefaultHasher;
use std::collections::{HashMap, HashSet};
use std::env;
@@ -41,7 +42,7 @@ use crate::extract_gdb_version;
use crate::is_android_gdb_target;
mod debugger;
-use debugger::{check_debugger_output, DebuggerCommands};
+use debugger::DebuggerCommands;
#[cfg(test)]
mod tests;
@@ -664,6 +665,7 @@ impl<'test> TestCx<'test> {
fn normalize_coverage_output(&self, coverage: &str) -> Result<String, String> {
let normalized = self.normalize_output(coverage, &[]);
+ let normalized = Self::anonymize_coverage_line_numbers(&normalized);
let mut lines = normalized.lines().collect::<Vec<_>>();
@@ -674,6 +676,21 @@ impl<'test> TestCx<'test> {
Ok(joined_lines)
}
+ /// Replace line numbers in coverage reports with the placeholder `LL`,
+ /// so that the tests are less sensitive to lines being added/removed.
+ fn anonymize_coverage_line_numbers(coverage: &str) -> Cow<'_, str> {
+ // The coverage reporter prints line numbers at the start of a line.
+ // They are truncated or left-padded to occupy exactly 5 columns.
+ // (`LineNumberColumnWidth` in `SourceCoverageViewText.cpp`.)
+ // A pipe character `|` appears immediately after the final digit.
+ //
+ // Line numbers that appear inside expansion/instantiation subviews
+ // have an additional prefix of ` |` for each nesting level.
+ static LINE_NUMBER_RE: Lazy<Regex> =
+ Lazy::new(|| Regex::new(r"(?m:^)(?<prefix>(?: \|)*) *[0-9]+\|").unwrap());
+ LINE_NUMBER_RE.replace_all(coverage, "$prefix LL|")
+ }
+
/// Coverage reports can describe multiple source files, separated by
/// blank lines. The order of these files is unpredictable (since it
/// depends on implementation details), so we need to sort the file
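The new `anonymize_coverage_line_numbers` helper rewrites the leading line-number column of coverage reports so tests stay stable when lines are added or removed. A hedged, standalone sketch of the same idea, assuming the `regex` crate (1.9+ for `(?<name>)` groups) and invented sample report lines:

```rust
// Sketch of the line-number anonymization above; the padding in the
// replacement string is illustrative, not the exact value used by compiletest.
use regex::Regex;

fn anonymize_coverage_line_numbers(coverage: &str) -> String {
    // A line number is right-aligned at the start of the line and followed by
    // `|`; nested expansion views add a ` |` prefix per nesting level.
    let re = Regex::new(r"(?m:^)(?<prefix>(?: \|)*) *[0-9]+\|").unwrap();
    re.replace_all(coverage, "${prefix}   LL|").into_owned()
}

fn main() {
    let report = "   17|      1|fn main() {\n |   18|      1|    foo();\n";
    let anonymized = anonymize_coverage_line_numbers(report);
    assert!(!anonymized.contains("17|"));
    assert!(anonymized.contains("LL|"));
    println!("{anonymized}");
}
```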
@@ -868,6 +885,8 @@ impl<'test> TestCx<'test> {
.args(&["--target", &self.config.target])
.arg("-L")
.arg(&aux_dir)
+ .arg("-A")
+ .arg("internal_features")
.args(&self.props.compile_flags)
.envs(self.props.rustc_env.clone());
self.maybe_add_external_args(&mut rustc, &self.config.target_rustcflags);
@@ -936,7 +955,9 @@ impl<'test> TestCx<'test> {
.arg("-L")
.arg(&self.config.build_base)
.arg("-L")
- .arg(aux_dir);
+ .arg(aux_dir)
+ .arg("-A")
+ .arg("internal_features");
self.set_revision_flags(&mut rustc);
self.maybe_add_external_args(&mut rustc, &self.config.target_rustcflags);
rustc.args(&self.props.compile_flags);
@@ -997,16 +1018,13 @@ impl<'test> TestCx<'test> {
};
// Parse debugger commands etc from test files
- let DebuggerCommands { commands, check_lines, breakpoint_lines, .. } =
- match DebuggerCommands::parse_from(
- &self.testpaths.file,
- self.config,
- prefixes,
- self.revision,
- ) {
- Ok(cmds) => cmds,
- Err(e) => self.fatal(&e),
- };
+ let dbg_cmds = DebuggerCommands::parse_from(
+ &self.testpaths.file,
+ self.config,
+ prefixes,
+ self.revision,
+ )
+ .unwrap_or_else(|e| self.fatal(&e));
// https://docs.microsoft.com/en-us/windows-hardware/drivers/debugger/debugger-commands
let mut script_str = String::with_capacity(2048);
@@ -1023,12 +1041,12 @@ impl<'test> TestCx<'test> {
// Set breakpoints on every line that contains the string "#break"
let source_file_name = self.testpaths.file.file_name().unwrap().to_string_lossy();
- for line in &breakpoint_lines {
+ for line in &dbg_cmds.breakpoint_lines {
script_str.push_str(&format!("bp `{}:{}`\n", source_file_name, line));
}
// Append the other `cdb-command:`s
- for line in &commands {
+ for line in &dbg_cmds.commands {
script_str.push_str(line);
script_str.push_str("\n");
}
@@ -1058,7 +1076,7 @@ impl<'test> TestCx<'test> {
self.fatal_proc_rec("Error while running CDB", &debugger_run_result);
}
- if let Err(e) = check_debugger_output(&debugger_run_result, &check_lines) {
+ if let Err(e) = dbg_cmds.check_output(&debugger_run_result) {
self.fatal_proc_rec(&e, &debugger_run_result);
}
}
@@ -1088,17 +1106,14 @@ impl<'test> TestCx<'test> {
PREFIXES
};
- let DebuggerCommands { commands, check_lines, breakpoint_lines } =
- match DebuggerCommands::parse_from(
- &self.testpaths.file,
- self.config,
- prefixes,
- self.revision,
- ) {
- Ok(cmds) => cmds,
- Err(e) => self.fatal(&e),
- };
- let mut cmds = commands.join("\n");
+ let dbg_cmds = DebuggerCommands::parse_from(
+ &self.testpaths.file,
+ self.config,
+ prefixes,
+ self.revision,
+ )
+ .unwrap_or_else(|e| self.fatal(&e));
+ let mut cmds = dbg_cmds.commands.join("\n");
// compile test file (it should have 'compile-flags:-g' in the header)
let should_run = self.run_if_enabled();
@@ -1132,13 +1147,14 @@ impl<'test> TestCx<'test> {
./{}/stage2/lib/rustlib/{}/lib/\n",
self.config.host, self.config.target
));
- for line in &breakpoint_lines {
+ for line in &dbg_cmds.breakpoint_lines {
script_str.push_str(
- &format!(
+ format!(
"break {:?}:{}\n",
self.testpaths.file.file_name().unwrap().to_string_lossy(),
*line
- )[..],
+ )
+ .as_str(),
);
}
script_str.push_str(&cmds);
@@ -1279,7 +1295,7 @@ impl<'test> TestCx<'test> {
}
// Add line breakpoints
- for line in &breakpoint_lines {
+ for line in &dbg_cmds.breakpoint_lines {
script_str.push_str(&format!(
"break '{}':{}\n",
self.testpaths.file.file_name().unwrap().to_string_lossy(),
@@ -1315,7 +1331,7 @@ impl<'test> TestCx<'test> {
self.fatal_proc_rec("gdb failed to execute", &debugger_run_result);
}
- if let Err(e) = check_debugger_output(&debugger_run_result, &check_lines) {
+ if let Err(e) = dbg_cmds.check_output(&debugger_run_result) {
self.fatal_proc_rec(&e, &debugger_run_result);
}
}
@@ -1372,16 +1388,13 @@ impl<'test> TestCx<'test> {
};
// Parse debugger commands etc from test files
- let DebuggerCommands { commands, check_lines, breakpoint_lines, .. } =
- match DebuggerCommands::parse_from(
- &self.testpaths.file,
- self.config,
- prefixes,
- self.revision,
- ) {
- Ok(cmds) => cmds,
- Err(e) => self.fatal(&e),
- };
+ let dbg_cmds = DebuggerCommands::parse_from(
+ &self.testpaths.file,
+ self.config,
+ prefixes,
+ self.revision,
+ )
+ .unwrap_or_else(|e| self.fatal(&e));
// Write debugger script:
// We don't want to hang when calling `quit` while the process is still running
@@ -1430,7 +1443,7 @@ impl<'test> TestCx<'test> {
// Set breakpoints on every line that contains the string "#break"
let source_file_name = self.testpaths.file.file_name().unwrap().to_string_lossy();
- for line in &breakpoint_lines {
+ for line in &dbg_cmds.breakpoint_lines {
script_str.push_str(&format!(
"breakpoint set --file '{}' --line {}\n",
source_file_name, line
@@ -1438,7 +1451,7 @@ impl<'test> TestCx<'test> {
}
// Append the other commands
- for line in &commands {
+ for line in &dbg_cmds.commands {
script_str.push_str(line);
script_str.push_str("\n");
}
@@ -1458,7 +1471,7 @@ impl<'test> TestCx<'test> {
self.fatal_proc_rec("Error while running LLDB", &debugger_run_result);
}
- if let Err(e) = check_debugger_output(&debugger_run_result, &check_lines) {
+ if let Err(e) = dbg_cmds.check_output(&debugger_run_result) {
self.fatal_proc_rec(&e, &debugger_run_result);
}
}
@@ -1649,7 +1662,7 @@ impl<'test> TestCx<'test> {
if self.props.known_bug {
if !expected_errors.is_empty() {
self.fatal_proc_rec(
- "`known_bug` tests should not have an expected errors",
+ "`known_bug` tests should not have an expected error",
proc_res,
);
}
@@ -1875,6 +1888,8 @@ impl<'test> TestCx<'test> {
.arg("--deny")
.arg("warnings")
.arg(&self.testpaths.file)
+ .arg("-A")
+ .arg("internal_features")
.args(&self.props.compile_flags);
if self.config.mode == RustdocJson {
@@ -1941,7 +1956,8 @@ impl<'test> TestCx<'test> {
let mut test_client =
Command::new(self.config.remote_test_client.as_ref().unwrap());
test_client
- .args(&["run", &support_libs.len().to_string(), &prog])
+ .args(&["run", &support_libs.len().to_string()])
+ .arg(&prog)
.args(support_libs)
.args(args);
@@ -2338,6 +2354,14 @@ impl<'test> TestCx<'test> {
// Hide line numbers to reduce churn
rustc.arg("-Zui-testing");
rustc.arg("-Zdeduplicate-diagnostics=no");
+ // #[cfg(not(bootstrap))] unconditionally pass this flag after the beta bump,
+ // since `ui-fulldeps --stage=1` builds using the stage 0 compiler,
+ // which doesn't have this flag.
+ if !(self.config.stage_id.starts_with("stage1-")
+ && self.config.suite == "ui-fulldeps")
+ {
+ rustc.arg("-Zwrite-long-types-to-disk=no");
+ }
// FIXME: use this for other modes too, for perf?
rustc.arg("-Cstrip=debuginfo");
}
@@ -2459,6 +2483,14 @@ impl<'test> TestCx<'test> {
rustc.args(&["-A", "unused"]);
}
+ // #[cfg(not(bootstrap))] unconditionally pass this flag after the beta bump,
+ // since `ui-fulldeps --stage=1` builds using the stage 0 compiler,
+ // which doesn't have this lint.
+ if !(self.config.stage_id.starts_with("stage1-") && self.config.suite == "ui-fulldeps") {
+ // Allow tests to use internal features.
+ rustc.args(&["-A", "internal_features"]);
+ }
+
if self.props.force_host {
self.maybe_add_external_args(&mut rustc, &self.config.host_rustcflags);
if !is_rustdoc {
@@ -2516,7 +2548,7 @@ impl<'test> TestCx<'test> {
// If this is emscripten, then run tests under nodejs
if self.config.target.contains("emscripten") {
if let Some(ref p) = self.config.nodejs {
- args.push(p.clone());
+ args.push(p.into());
} else {
self.fatal("emscripten target requested and no NodeJS binary found (--nodejs)");
}
@@ -2524,7 +2556,7 @@ impl<'test> TestCx<'test> {
// shim
} else if self.config.target.contains("wasm32") {
if let Some(ref p) = self.config.nodejs {
- args.push(p.clone());
+ args.push(p.into());
} else {
self.fatal("wasm32 target requested and no NodeJS binary found (--nodejs)");
}
@@ -2536,13 +2568,12 @@ impl<'test> TestCx<'test> {
.unwrap() // chop off `ui`
.parent()
.unwrap(); // chop off `tests`
- args.push(src.join("src/etc/wasm32-shim.js").display().to_string());
+ args.push(src.join("src/etc/wasm32-shim.js").into_os_string());
}
let exe_file = self.make_exe_name();
- // FIXME (#9639): This needs to handle non-utf8 paths
- args.push(exe_file.to_str().unwrap().to_owned());
+ args.push(exe_file.into_os_string());
// Add the arguments in the run_flags directive
args.extend(self.split_maybe_args(&self.props.run_flags));
@@ -2551,12 +2582,16 @@ impl<'test> TestCx<'test> {
ProcArgs { prog, args }
}
- fn split_maybe_args(&self, argstr: &Option<String>) -> Vec<String> {
+ fn split_maybe_args(&self, argstr: &Option<String>) -> Vec<OsString> {
match *argstr {
Some(ref s) => s
.split(' ')
.filter_map(|s| {
- if s.chars().all(|c| c.is_whitespace()) { None } else { Some(s.to_owned()) }
+ if s.chars().all(|c| c.is_whitespace()) {
+ None
+ } else {
+ Some(OsString::from(s))
+ }
})
.collect(),
None => Vec::new(),
@@ -2760,6 +2795,10 @@ impl<'test> TestCx<'test> {
self.fatal_proc_rec("compilation failed!", &proc_res);
}
+ if let Some(PassMode::Build) = self.pass_mode() {
+ return;
+ }
+
let output_path = self.output_base_name().with_extension("ll");
let proc_res = self.verify_with_filecheck(&output_path);
if !proc_res.status.success() {
@@ -4153,8 +4192,8 @@ impl<'test> TestCx<'test> {
# Match paths that don't include spaces.
(?:\\[\pL\pN\.\-_']+)+\.\pL+
|
- # If the path starts with a well-known root, then allow spaces.
- \$(?:DIR|SRC_DIR|TEST_BUILD_DIR|BUILD_DIR|LIB_DIR)(?:\\[\pL\pN\.\-_' ]+)+
+ # If the path starts with a well-known root, then allow spaces and no file extension.
+ \$(?:DIR|SRC_DIR|TEST_BUILD_DIR|BUILD_DIR|LIB_DIR)(?:\\[\pL\pN\.\-_'\ ]+)+
)"#,
)
.unwrap()
@@ -4359,8 +4398,8 @@ impl<'test> TestCx<'test> {
}
struct ProcArgs {
- prog: String,
- args: Vec<String>,
+ prog: OsString,
+ args: Vec<OsString>,
}
pub struct ProcRes {
diff --git a/src/tools/compiletest/src/runtest/debugger.rs b/src/tools/compiletest/src/runtest/debugger.rs
index 379ff0bab..eebe5f358 100644
--- a/src/tools/compiletest/src/runtest/debugger.rs
+++ b/src/tools/compiletest/src/runtest/debugger.rs
@@ -2,18 +2,25 @@ use crate::common::Config;
use crate::header::line_directive;
use crate::runtest::ProcRes;
+use std::fmt::Write;
use std::fs::File;
use std::io::{BufRead, BufReader};
-use std::path::Path;
+use std::path::{Path, PathBuf};
+/// Representation of information to invoke a debugger and check its output
pub(super) struct DebuggerCommands {
+ /// Commands for the debugger
pub commands: Vec<String>,
- pub check_lines: Vec<String>,
+ /// Lines to insert breakpoints at
pub breakpoint_lines: Vec<usize>,
+ /// Contains the source line number to check and the line itself
+ check_lines: Vec<(usize, String)>,
+ /// Source file name
+ file: PathBuf,
}
impl DebuggerCommands {
- pub(super) fn parse_from(
+ pub fn parse_from(
file: &Path,
config: &Config,
debugger_prefixes: &[&str],
@@ -21,7 +28,7 @@ impl DebuggerCommands {
) -> Result<Self, String> {
let directives = debugger_prefixes
.iter()
- .map(|prefix| (format!("{}-command", prefix), format!("{}-check", prefix)))
+ .map(|prefix| (format!("{prefix}-command"), format!("{prefix}-check")))
.collect::<Vec<_>>();
let mut breakpoint_lines = vec![];
@@ -29,63 +36,88 @@ impl DebuggerCommands {
let mut check_lines = vec![];
let mut counter = 0;
let reader = BufReader::new(File::open(file).unwrap());
- for line in reader.lines() {
+ for (line_no, line) in reader.lines().enumerate() {
counter += 1;
- match line {
- Ok(line) => {
- let (lnrev, line) = line_directive("//", &line).unwrap_or((None, &line));
-
- // Skip any revision specific directive that doesn't match the current
- // revision being tested
- if lnrev.is_some() && lnrev != rev {
- continue;
- }
-
- if line.contains("#break") {
- breakpoint_lines.push(counter);
- }
-
- for &(ref command_directive, ref check_directive) in &directives {
- config
- .parse_name_value_directive(&line, command_directive)
- .map(|cmd| commands.push(cmd));
-
- config
- .parse_name_value_directive(&line, check_directive)
- .map(|cmd| check_lines.push(cmd));
- }
- }
- Err(e) => return Err(format!("Error while parsing debugger commands: {}", e)),
+ let line = line.map_err(|e| format!("Error while parsing debugger commands: {}", e))?;
+ let (lnrev, line) = line_directive("//", &line).unwrap_or((None, &line));
+
+ // Skip any revision specific directive that doesn't match the current
+ // revision being tested
+ if lnrev.is_some() && lnrev != rev {
+ continue;
+ }
+
+ if line.contains("#break") {
+ breakpoint_lines.push(counter);
+ }
+
+ for &(ref command_directive, ref check_directive) in &directives {
+ config
+ .parse_name_value_directive(&line, command_directive)
+ .map(|cmd| commands.push(cmd));
+
+ config
+ .parse_name_value_directive(&line, check_directive)
+ .map(|cmd| check_lines.push((line_no, cmd)));
}
}
- Ok(Self { commands, check_lines, breakpoint_lines })
+ Ok(Self { commands, breakpoint_lines, check_lines, file: file.to_owned() })
}
-}
-pub(super) fn check_debugger_output(
- debugger_run_result: &ProcRes,
- check_lines: &[String],
-) -> Result<(), String> {
- let num_check_lines = check_lines.len();
-
- let mut check_line_index = 0;
- for line in debugger_run_result.stdout.lines() {
- if check_line_index >= num_check_lines {
- break;
+ /// Given debugger output and lines to check, ensure that every line is
+ /// contained in the debugger output. The check lines need to be found in
+ /// order, but there can be extra lines between.
+ pub fn check_output(&self, debugger_run_result: &ProcRes) -> Result<(), String> {
+ // (src_lineno, ck_line) that we did find
+ let mut found = vec![];
+ // (src_lineno, ck_line) that we couldn't find
+ let mut missing = vec![];
+ // We can find any current match anywhere after our last match
+ let mut last_idx = 0;
+ let dbg_lines: Vec<&str> = debugger_run_result.stdout.lines().collect();
+
+ for (src_lineno, ck_line) in &self.check_lines {
+ if let Some(offset) = dbg_lines
+ .iter()
+ .skip(last_idx)
+ .position(|out_line| check_single_line(out_line, &ck_line))
+ {
+ last_idx += offset;
+ found.push((src_lineno, dbg_lines[last_idx]));
+ } else {
+ missing.push((src_lineno, ck_line));
+ }
}
- if check_single_line(line, &(check_lines[check_line_index])[..]) {
- check_line_index += 1;
+ if missing.is_empty() {
+ Ok(())
+ } else {
+ let fname = self.file.file_name().unwrap().to_string_lossy();
+ let mut msg = format!(
+ "check directive(s) from `{}` not found in debugger output. errors:",
+ self.file.display()
+ );
+
+ for (src_lineno, err_line) in missing {
+ write!(msg, "\n ({fname}:{num}) `{err_line}`", num = src_lineno + 1).unwrap();
+ }
+
+ if !found.is_empty() {
+ let init = "\nthe following subset of check directive(s) was found successfully:";
+ msg.push_str(init);
+ for (src_lineno, found_line) in found {
+ write!(msg, "\n ({fname}:{num}) `{found_line}`", num = src_lineno + 1)
+ .unwrap();
+ }
+ }
+
+ Err(msg)
}
}
- if check_line_index != num_check_lines && num_check_lines > 0 {
- Err(format!("line not found in debugger output: {}", check_lines[check_line_index]))
- } else {
- Ok(())
- }
}
+/// Check that the pattern in `check_line` applies to `line`. Returns `true` if they do match.
fn check_single_line(line: &str, check_line: &str) -> bool {
// Allow check lines to leave parts unspecified (e.g., uninitialized
// bits in the wrong case of an enum) with the notation "[...]".
@@ -101,21 +133,19 @@ fn check_single_line(line: &str, check_line: &str) -> bool {
}
let (mut rest, first_fragment) = if can_start_anywhere {
- match line.find(check_fragments[0]) {
- Some(pos) => (&line[pos + check_fragments[0].len()..], 1),
- None => return false,
- }
+ let Some(pos) = line.find(check_fragments[0]) else {
+ return false;
+ };
+ (&line[pos + check_fragments[0].len()..], 1)
} else {
(line, 0)
};
for current_fragment in &check_fragments[first_fragment..] {
- match rest.find(current_fragment) {
- Some(pos) => {
- rest = &rest[pos + current_fragment.len()..];
- }
- None => return false,
- }
+ let Some(pos) = rest.find(current_fragment) else {
+ return false;
+ };
+ rest = &rest[pos + current_fragment.len()..];
}
if !can_end_anywhere && !rest.is_empty() { false } else { true }
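The `check_output` rewrite above requires every check line to appear in the debugger output in order, with unrelated output allowed in between. A simplified, self-contained sketch of that ordered matching (plain substring matching only; the real `check_single_line` additionally supports `[...]` wildcards and anchoring):

```rust
// Illustrative only: each check must be found after the previous match,
// skipping over any intervening output lines.
fn matches_in_order(output: &str, checks: &[&str]) -> bool {
    let mut lines = output.lines();
    checks.iter().all(|&check| lines.any(|line| line.contains(check)))
}

fn main() {
    let output = "breakpoint hit\nx = 5\nsome noise\ny = 7\n";
    assert!(matches_in_order(output, &["x = 5", "y = 7"]));
    // Matching never rewinds, so out-of-order checks fail.
    assert!(!matches_in_order(output, &["y = 7", "x = 5"]));
}
```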
diff --git a/src/tools/compiletest/src/runtest/tests.rs b/src/tools/compiletest/src/runtest/tests.rs
index 511051111..fb3dd326a 100644
--- a/src/tools/compiletest/src/runtest/tests.rs
+++ b/src/tools/compiletest/src/runtest/tests.rs
@@ -8,8 +8,8 @@ fn normalize_platform_differences() {
"$BUILD_DIR/../parser.rs"
);
assert_eq!(
- TestCx::normalize_platform_differences(r"$DIR\bar.rs hello\nworld"),
- r"$DIR/bar.rs hello\nworld"
+ TestCx::normalize_platform_differences(r"$DIR\bar.rs: hello\nworld"),
+ r"$DIR/bar.rs: hello\nworld"
);
assert_eq!(
TestCx::normalize_platform_differences(r"either bar\baz.rs or bar\baz\mod.rs"),
@@ -27,8 +27,8 @@ fn normalize_platform_differences() {
);
assert_eq!(TestCx::normalize_platform_differences(r"$DIR\foo.rs:12:11"), "$DIR/foo.rs:12:11",);
assert_eq!(
- TestCx::normalize_platform_differences(r"$DIR\path with spaces 'n' quotes"),
- "$DIR/path with spaces 'n' quotes",
+ TestCx::normalize_platform_differences(r"$DIR\path with\spaces 'n' quotes"),
+ "$DIR/path with/spaces 'n' quotes",
);
assert_eq!(
TestCx::normalize_platform_differences(r"$DIR\file_with\no_extension"),
diff --git a/src/tools/jsondocck/src/cache.rs b/src/tools/jsondocck/src/cache.rs
index f9e542327..50697d46b 100644
--- a/src/tools/jsondocck/src/cache.rs
+++ b/src/tools/jsondocck/src/cache.rs
@@ -15,8 +15,10 @@ impl Cache {
/// Create a new cache, used to read files only once and otherwise store their contents.
pub fn new(config: &Config) -> Cache {
let root = Path::new(&config.doc_dir);
- let filename = Path::new(&config.template).file_stem().unwrap();
- let file_path = root.join(&Path::with_extension(Path::new(filename), "json"));
+ // `filename` needs to replace `-` with `_` to be sure the JSON path will always be valid.
+ let filename =
+ Path::new(&config.template).file_stem().unwrap().to_str().unwrap().replace('-', "_");
+ let file_path = root.join(&Path::with_extension(Path::new(&filename), "json"));
let content = fs::read_to_string(&file_path).expect("failed to read JSON file");
Cache {
diff --git a/src/tools/jsondoclint/src/main.rs b/src/tools/jsondoclint/src/main.rs
index ee163ddfd..aaaba78cb 100644
--- a/src/tools/jsondoclint/src/main.rs
+++ b/src/tools/jsondoclint/src/main.rs
@@ -1,4 +1,5 @@
use std::io::{BufWriter, Write};
+use std::path::{Path, PathBuf};
use anyhow::{bail, Result};
use clap::Parser;
@@ -25,7 +26,7 @@ enum ErrorKind {
#[derive(Debug, Serialize)]
struct JsonOutput {
- path: String,
+ path: PathBuf,
errors: Vec<Error>,
}
@@ -45,6 +46,12 @@ struct Cli {
fn main() -> Result<()> {
let Cli { path, verbose, json_output } = Cli::parse();
+ // We convert `-` into `_` for the file name to be sure the JSON path will always be correct.
+ let path = Path::new(&path);
+ let filename = path.file_name().unwrap().to_str().unwrap().replace('-', "_");
+ let parent = path.parent().unwrap();
+ let path = parent.join(&filename);
+
let contents = fs::read_to_string(&path)?;
let krate: Crate = serde_json::from_str(&contents)?;
assert_eq!(krate.format_version, FORMAT_VERSION);
@@ -101,7 +108,7 @@ fn main() -> Result<()> {
ErrorKind::Custom(msg) => eprintln!("{}: {}", err.id.0, msg),
}
}
- bail!("Errors validating json {path}");
+ bail!("Errors validating json {}", path.display());
}
Ok(())
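Both the jsondocck cache change and this jsondoclint change normalize `-` to `_` in the file name before opening the rustdoc JSON output, since rustdoc names the file after the crate and crate names use `_`. A small hedged sketch with invented paths:

```rust
// Illustrative only: derive the JSON path from a template the way the
// comments above describe, replacing `-` with `_` in the file stem.
use std::path::{Path, PathBuf};

fn json_path_for(template: &Path) -> PathBuf {
    let stem = template.file_stem().unwrap().to_str().unwrap().replace('-', "_");
    template.with_file_name(format!("{stem}.json"))
}

fn main() {
    assert_eq!(
        json_path_for(Path::new("doc/my-crate.rs")),
        PathBuf::from("doc/my_crate.json")
    );
}
```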
diff --git a/src/tools/linkchecker/main.rs b/src/tools/linkchecker/main.rs
index c8a370085..7f73cac63 100644
--- a/src/tools/linkchecker/main.rs
+++ b/src/tools/linkchecker/main.rs
@@ -368,7 +368,6 @@ impl Checker {
return;
}
// Search for intra-doc links that rustdoc didn't warn about
- // FIXME(#77199, 77200) Rustdoc should just warn about these directly.
// NOTE: only looks at one line at a time; in practice this should find most links
for (i, line) in source.lines().enumerate() {
for broken_link in BROKEN_INTRA_DOC_LINK.captures_iter(line) {
diff --git a/src/tools/opt-dist/Cargo.toml b/src/tools/opt-dist/Cargo.toml
new file mode 100644
index 000000000..3f7dba81c
--- /dev/null
+++ b/src/tools/opt-dist/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "opt-dist"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+build_helper = { path = "../build_helper" }
+env_logger = "0.10"
+log = "0.4"
+anyhow = { version = "1", features = ["backtrace"] }
+humantime = "2"
+humansize = "2"
+sysinfo = { version = "0.29", default-features = false }
+fs_extra = "1"
+camino = "1"
+reqwest = { version = "0.11", features = ["blocking"] }
+zip = { version = "0.6", default-features = false, features = ["deflate"] }
+tar = "0.4"
+xz = "0.1"
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
+glob = "0.3"
+tempfile = "3.5"
diff --git a/src/tools/opt-dist/README.md b/src/tools/opt-dist/README.md
new file mode 100644
index 000000000..05a75870d
--- /dev/null
+++ b/src/tools/opt-dist/README.md
@@ -0,0 +1,7 @@
+# Optimized build pipeline
+This binary implements a heavily optimized build pipeline for `rustc` and `LLVM` artifacts that are used both for
+benchmarking with the perf bot and for final distribution to users.
+
+It uses LTO, PGO and BOLT to optimize the compiler and LLVM as much as possible.
+This logic is not part of bootstrap, because it needs to invoke bootstrap multiple times, force-rebuild various
+artifacts repeatedly and sometimes go around bootstrap's cache mechanism.
diff --git a/src/tools/opt-dist/src/bolt.rs b/src/tools/opt-dist/src/bolt.rs
new file mode 100644
index 000000000..cf9f4fabc
--- /dev/null
+++ b/src/tools/opt-dist/src/bolt.rs
@@ -0,0 +1,103 @@
+use anyhow::Context;
+
+use crate::exec::cmd;
+use crate::training::LlvmBoltProfile;
+use camino::{Utf8Path, Utf8PathBuf};
+
+use crate::utils::io::copy_file;
+
+/// Instruments an artifact at the given `path` (in-place) with BOLT and then calls `func`.
+/// After this function finishes, the original file will be restored.
+pub fn with_bolt_instrumented<F: FnOnce() -> anyhow::Result<R>, R>(
+ path: &Utf8Path,
+ func: F,
+) -> anyhow::Result<R> {
+ // Back up the original file.
+ // It will be restored to its original state when this function exits.
+ // By copying it, we break any existing hard links, so that they are not affected by the
+ // instrumentation.
+ let _backup_file = BackedUpFile::new(path)?;
+
+ let instrumented_path = tempfile::NamedTempFile::new()?.into_temp_path();
+
+ // Instrument the original file with BOLT, saving the result into `instrumented_path`
+ cmd(&["llvm-bolt"])
+ .arg("-instrument")
+ .arg(path)
+ // Make sure that each process will write its profiles into a separate file
+ .arg("--instrumentation-file-append-pid")
+ .arg("-o")
+ .arg(instrumented_path.display())
+ .run()
+ .with_context(|| anyhow::anyhow!("Could not instrument {path} using BOLT"))?;
+
+ // Copy the instrumented artifact over the original one
+ copy_file(&instrumented_path, path)?;
+
+ // Run the function that will make use of the instrumented artifact.
+ // The original file will be restored when `_backup_file` is dropped.
+ func()
+}
+
+/// Optimizes the file at `path` with BOLT in-place using the given `profile`.
+pub fn bolt_optimize(path: &Utf8Path, profile: &LlvmBoltProfile) -> anyhow::Result<()> {
+ // Copy the artifact to a new location, so that we do not use the same input and output file.
+ // BOLT cannot handle optimizing when the input and output are the same file, because it performs
+ // in-place patching.
+ let temp_path = tempfile::NamedTempFile::new()?.into_temp_path();
+ copy_file(path, &temp_path)?;
+
+ cmd(&["llvm-bolt"])
+ .arg(temp_path.display())
+ .arg("-data")
+ .arg(&profile.0)
+ .arg("-o")
+ .arg(path)
+ // Reorder basic blocks within functions
+ .arg("-reorder-blocks=ext-tsp")
+ // Reorder functions within the binary
+ .arg("-reorder-functions=hfsort+")
+ // Split function code into hot and cold regions
+ .arg("-split-functions")
+ // Split as many basic blocks as possible
+ .arg("-split-all-cold")
+ // Move jump tables to a separate section
+ .arg("-jump-tables=move")
+ // Fold functions with identical code
+ .arg("-icf=1")
+ // The following flag saves about 50 MiB of libLLVM.so size.
+ // However, it succeeds only non-deterministically. To avoid frequent artifact size swings,
+ // it is kept disabled for now.
+ // FIXME(kobzol): try to re-enable this once BOLT in-place rewriting is merged or after
+ // we bump LLVM.
+ // Try to reuse old text segments to reduce binary size
+ // .arg("--use-old-text")
+ // Update DWARF debug info in the final binary
+ .arg("-update-debug-sections")
+ // Print optimization statistics
+ .arg("-dyno-stats")
+ .run()
+ .with_context(|| anyhow::anyhow!("Could not optimize {path} with BOLT"))?;
+
+ Ok(())
+}
+
+/// Copies a file to a temporary location and restores it (copies it back) when it is dropped.
+pub struct BackedUpFile {
+ original: Utf8PathBuf,
+ backup: tempfile::TempPath,
+}
+
+impl BackedUpFile {
+ pub fn new(file: &Utf8Path) -> anyhow::Result<Self> {
+ let temp_path = tempfile::NamedTempFile::new()?.into_temp_path();
+ copy_file(file, &temp_path)?;
+ Ok(Self { backup: temp_path, original: file.to_path_buf() })
+ }
+}
+
+impl Drop for BackedUpFile {
+ fn drop(&mut self) {
+ copy_file(&self.backup, &self.original).expect("Cannot restore backed up file");
+ }
+}
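The instrument-then-restore flow above relies on the `BackedUpFile` RAII guard. A simplified, std-only sketch of that pattern (the real implementation uses `camino` paths and `tempfile`; the file name and contents here are invented):

```rust
// Copy a file aside and restore it when the guard is dropped, even if the
// work in between replaces the file.
use std::fs;
use std::path::{Path, PathBuf};

struct BackedUpFile {
    original: PathBuf,
    backup: PathBuf,
}

impl BackedUpFile {
    fn new(file: &Path) -> std::io::Result<Self> {
        let backup = file.with_extension("bak");
        fs::copy(file, &backup)?;
        Ok(Self { original: file.to_path_buf(), backup })
    }
}

impl Drop for BackedUpFile {
    fn drop(&mut self) {
        // Restore the original contents and clean up the backup copy.
        fs::copy(&self.backup, &self.original).expect("cannot restore backed up file");
        let _ = fs::remove_file(&self.backup);
    }
}

fn main() -> std::io::Result<()> {
    let path = Path::new("example.txt");
    fs::write(path, "original contents")?;
    {
        let _guard = BackedUpFile::new(path)?;
        // Simulate "instrumentation": overwrite the file in place.
        fs::write(path, "instrumented contents")?;
    } // `_guard` dropped here: example.txt is restored.
    assert_eq!(fs::read_to_string(path)?, "original contents");
    fs::remove_file(path)?;
    Ok(())
}
```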
diff --git a/src/tools/opt-dist/src/environment/linux.rs b/src/tools/opt-dist/src/environment/linux.rs
new file mode 100644
index 000000000..58b7e6d23
--- /dev/null
+++ b/src/tools/opt-dist/src/environment/linux.rs
@@ -0,0 +1,58 @@
+use crate::environment::Environment;
+use crate::exec::cmd;
+use crate::utils::io::copy_directory;
+use camino::{Utf8Path, Utf8PathBuf};
+
+pub(super) struct LinuxEnvironment;
+
+impl Environment for LinuxEnvironment {
+ fn python_binary(&self) -> &'static str {
+ "python3"
+ }
+
+ fn checkout_path(&self) -> Utf8PathBuf {
+ Utf8PathBuf::from("/checkout")
+ }
+
+ fn host_llvm_dir(&self) -> Utf8PathBuf {
+ Utf8PathBuf::from("/rustroot")
+ }
+
+ fn opt_artifacts(&self) -> Utf8PathBuf {
+ Utf8PathBuf::from("/tmp/tmp-multistage/opt-artifacts")
+ }
+
+ fn build_root(&self) -> Utf8PathBuf {
+ self.checkout_path().join("obj")
+ }
+
+ fn prepare_rustc_perf(&self) -> anyhow::Result<()> {
+ // /tmp/rustc-perf comes from the x64 dist Dockerfile
+ copy_directory(Utf8Path::new("/tmp/rustc-perf"), &self.rustc_perf_dir())?;
+ cmd(&[self.cargo_stage_0().as_str(), "build", "-p", "collector"])
+ .workdir(&self.rustc_perf_dir())
+ .env("RUSTC", &self.rustc_stage_0().into_string())
+ .env("RUSTC_BOOTSTRAP", "1")
+ .run()?;
+ Ok(())
+ }
+
+ fn supports_bolt(&self) -> bool {
+ true
+ }
+
+ fn supports_shared_llvm(&self) -> bool {
+ true
+ }
+
+ fn executable_extension(&self) -> &'static str {
+ ""
+ }
+
+ fn skipped_tests(&self) -> &'static [&'static str] {
+ &[
+ // Fails because of linker errors, as of June 2023.
+ "tests/ui/process/nofile-limit.rs",
+ ]
+ }
+}
diff --git a/src/tools/opt-dist/src/environment/mod.rs b/src/tools/opt-dist/src/environment/mod.rs
new file mode 100644
index 000000000..a8650fad0
--- /dev/null
+++ b/src/tools/opt-dist/src/environment/mod.rs
@@ -0,0 +1,77 @@
+use camino::Utf8PathBuf;
+
+#[cfg(target_family = "unix")]
+mod linux;
+#[cfg(target_family = "windows")]
+mod windows;
+
+pub trait Environment {
+ fn host_triple(&self) -> String {
+ std::env::var("PGO_HOST").expect("PGO_HOST environment variable missing")
+ }
+
+ fn python_binary(&self) -> &'static str;
+
+ /// The rustc checkout, where the compiler source is located.
+ fn checkout_path(&self) -> Utf8PathBuf;
+
+ /// Path to the host LLVM used to compile LLVM in `src/llvm-project`.
+ fn host_llvm_dir(&self) -> Utf8PathBuf;
+
+ /// Directory where the optimization artifacts (PGO/BOLT profiles, etc.)
+ /// will be stored.
+ fn opt_artifacts(&self) -> Utf8PathBuf;
+
+ /// The main directory where the build occurs.
+ fn build_root(&self) -> Utf8PathBuf;
+
+ fn build_artifacts(&self) -> Utf8PathBuf {
+ self.build_root().join("build").join(self.host_triple())
+ }
+
+ fn cargo_stage_0(&self) -> Utf8PathBuf {
+ self.build_artifacts()
+ .join("stage0")
+ .join("bin")
+ .join(format!("cargo{}", self.executable_extension()))
+ }
+
+ fn rustc_stage_0(&self) -> Utf8PathBuf {
+ self.build_artifacts()
+ .join("stage0")
+ .join("bin")
+ .join(format!("rustc{}", self.executable_extension()))
+ }
+
+ fn rustc_stage_2(&self) -> Utf8PathBuf {
+ self.build_artifacts()
+ .join("stage2")
+ .join("bin")
+ .join(format!("rustc{}", self.executable_extension()))
+ }
+
+ /// Path to the built rustc-perf benchmark suite.
+ fn rustc_perf_dir(&self) -> Utf8PathBuf {
+ self.opt_artifacts().join("rustc-perf")
+ }
+
+ /// Download and/or compile rustc-perf.
+ fn prepare_rustc_perf(&self) -> anyhow::Result<()>;
+
+ fn supports_bolt(&self) -> bool;
+
+ fn supports_shared_llvm(&self) -> bool;
+
+ /// What is the extension of binary executables in this environment?
+ fn executable_extension(&self) -> &'static str;
+
+ /// List of test paths that should be skipped when testing the optimized artifacts.
+ fn skipped_tests(&self) -> &'static [&'static str];
+}
+
+pub fn create_environment() -> Box<dyn Environment> {
+ #[cfg(target_family = "unix")]
+ return Box::new(linux::LinuxEnvironment);
+ #[cfg(target_family = "windows")]
+ return Box::new(windows::WindowsEnvironment::new());
+}
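The provided methods of `Environment` derive all tool paths from a few platform-specific primitives. A toy, self-contained sketch of that composition (`FakeEnv` and its literal paths are invented; the real trait uses `camino::Utf8PathBuf` and defines many more methods):

```rust
// Illustration of how default trait methods compose paths from the
// platform-specific pieces each environment supplies.
use std::path::PathBuf;

trait Environment {
    fn host_triple(&self) -> String;
    fn build_root(&self) -> PathBuf;
    fn executable_extension(&self) -> &'static str;

    fn build_artifacts(&self) -> PathBuf {
        self.build_root().join("build").join(self.host_triple())
    }

    fn rustc_stage_2(&self) -> PathBuf {
        self.build_artifacts()
            .join("stage2")
            .join("bin")
            .join(format!("rustc{}", self.executable_extension()))
    }
}

struct FakeEnv;

impl Environment for FakeEnv {
    fn host_triple(&self) -> String {
        "x86_64-unknown-linux-gnu".to_string()
    }
    fn build_root(&self) -> PathBuf {
        PathBuf::from("/checkout/obj")
    }
    fn executable_extension(&self) -> &'static str {
        ""
    }
}

fn main() {
    // /checkout/obj/build/x86_64-unknown-linux-gnu/stage2/bin/rustc
    println!("{}", FakeEnv.rustc_stage_2().display());
}
```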
diff --git a/src/tools/opt-dist/src/environment/windows.rs b/src/tools/opt-dist/src/environment/windows.rs
new file mode 100644
index 000000000..8a9733d64
--- /dev/null
+++ b/src/tools/opt-dist/src/environment/windows.rs
@@ -0,0 +1,82 @@
+use crate::environment::Environment;
+use crate::exec::cmd;
+use crate::utils::io::move_directory;
+use camino::Utf8PathBuf;
+use std::io::Cursor;
+use zip::ZipArchive;
+
+pub(super) struct WindowsEnvironment {
+ checkout_dir: Utf8PathBuf,
+}
+
+impl WindowsEnvironment {
+ pub fn new() -> Self {
+ Self { checkout_dir: std::env::current_dir().unwrap().try_into().unwrap() }
+ }
+}
+
+impl Environment for WindowsEnvironment {
+ fn python_binary(&self) -> &'static str {
+ "python"
+ }
+
+ fn checkout_path(&self) -> Utf8PathBuf {
+ self.checkout_dir.clone()
+ }
+
+ fn host_llvm_dir(&self) -> Utf8PathBuf {
+ self.checkout_path().join("citools").join("clang-rust")
+ }
+
+ fn opt_artifacts(&self) -> Utf8PathBuf {
+ self.checkout_path().join("opt-artifacts")
+ }
+
+ fn build_root(&self) -> Utf8PathBuf {
+ self.checkout_path()
+ }
+
+ fn prepare_rustc_perf(&self) -> anyhow::Result<()> {
+ // FIXME: add some mechanism for synchronization of this commit SHA with
+ // Linux (which builds rustc-perf in a Dockerfile)
+ // rustc-perf version from 2023-05-30
+ const PERF_COMMIT: &str = "8b2ac3042e1ff2c0074455a0a3618adef97156b1";
+
+ let url = format!("https://github.com/rust-lang/rustc-perf/archive/{PERF_COMMIT}.zip");
+ let response = reqwest::blocking::get(url)?.error_for_status()?.bytes()?.to_vec();
+
+ let mut archive = ZipArchive::new(Cursor::new(response))?;
+ archive.extract(self.rustc_perf_dir())?;
+ move_directory(
+ &self.rustc_perf_dir().join(format!("rustc-perf-{PERF_COMMIT}")),
+ &self.rustc_perf_dir(),
+ )?;
+
+ cmd(&[self.cargo_stage_0().as_str(), "build", "-p", "collector"])
+ .workdir(&self.rustc_perf_dir())
+ .env("RUSTC", &self.rustc_stage_0().into_string())
+ .env("RUSTC_BOOTSTRAP", "1")
+ .run()?;
+
+ Ok(())
+ }
+
+ fn supports_bolt(&self) -> bool {
+ false
+ }
+
+ fn supports_shared_llvm(&self) -> bool {
+ false
+ }
+
+ fn executable_extension(&self) -> &'static str {
+ ".exe"
+ }
+
+ fn skipped_tests(&self) -> &'static [&'static str] {
+ &[
+ // Fails as of June 2023.
+ "tests\\codegen\\vec-shrink-panik.rs",
+ ]
+ }
+}
diff --git a/src/tools/opt-dist/src/exec.rs b/src/tools/opt-dist/src/exec.rs
new file mode 100644
index 000000000..4765dceb5
--- /dev/null
+++ b/src/tools/opt-dist/src/exec.rs
@@ -0,0 +1,179 @@
+use crate::environment::Environment;
+use crate::metrics::{load_metrics, record_metrics};
+use crate::timer::TimerSection;
+use crate::training::{LlvmBoltProfile, LlvmPGOProfile, RustcPGOProfile};
+use camino::{Utf8Path, Utf8PathBuf};
+use std::collections::BTreeMap;
+use std::fs::File;
+use std::process::{Command, Stdio};
+
+#[derive(Default)]
+pub struct CmdBuilder {
+ args: Vec<String>,
+ env: BTreeMap<String, String>,
+ workdir: Option<Utf8PathBuf>,
+ output: Option<Utf8PathBuf>,
+}
+
+impl CmdBuilder {
+ pub fn arg<S: ToString>(mut self, arg: S) -> Self {
+ self.args.push(arg.to_string());
+ self
+ }
+
+ pub fn env(mut self, name: &str, value: &str) -> Self {
+ self.env.insert(name.to_string(), value.to_string());
+ self
+ }
+
+ pub fn workdir(mut self, path: &Utf8Path) -> Self {
+ self.workdir = Some(path.to_path_buf());
+ self
+ }
+
+ pub fn redirect_output(mut self, path: Utf8PathBuf) -> Self {
+ self.output = Some(path);
+ self
+ }
+
+ pub fn run(self) -> anyhow::Result<()> {
+ let mut cmd_str = String::new();
+ cmd_str.push_str(
+ &self
+ .env
+ .iter()
+ .map(|(key, value)| format!("{key}={value}"))
+ .collect::<Vec<_>>()
+ .join(" "),
+ );
+ if !self.env.is_empty() {
+ cmd_str.push(' ');
+ }
+ cmd_str.push_str(&self.args.join(" "));
+ if let Some(ref path) = self.output {
+ cmd_str.push_str(&format!(" > {path:?}"));
+ }
+ cmd_str.push_str(&format!(
+ " [at {}]",
+ self.workdir
+ .clone()
+ .unwrap_or_else(|| std::env::current_dir().unwrap().try_into().unwrap())
+ ));
+ log::info!("Executing `{cmd_str}`");
+
+ let mut cmd = Command::new(&self.args[0]);
+ cmd.stdin(Stdio::null());
+ cmd.args(self.args.iter().skip(1));
+ for (key, value) in &self.env {
+ cmd.env(key, value);
+ }
+ if let Some(ref output) = self.output {
+ cmd.stdout(File::create(output.clone().into_std_path_buf())?);
+ }
+ if let Some(ref workdir) = self.workdir {
+ cmd.current_dir(workdir.clone().into_std_path_buf());
+ }
+ let exit_status = cmd.spawn()?.wait()?;
+ if !exit_status.success() {
+ Err(anyhow::anyhow!(
+ "Command {cmd_str} has failed with exit code {:?}",
+ exit_status.code(),
+ ))
+ } else {
+ Ok(())
+ }
+ }
+}
+
+pub fn cmd(args: &[&str]) -> CmdBuilder {
+ assert!(!args.is_empty());
+ CmdBuilder { args: args.iter().map(|s| s.to_string()).collect(), ..Default::default() }
+}
+
+pub struct Bootstrap {
+ cmd: CmdBuilder,
+ metrics_path: Utf8PathBuf,
+}
+
+impl Bootstrap {
+ pub fn build(env: &dyn Environment) -> Self {
+ let metrics_path = env.build_root().join("build").join("metrics.json");
+ let cmd = cmd(&[
+ env.python_binary(),
+ env.checkout_path().join("x.py").as_str(),
+ "build",
+ "--target",
+ &env.host_triple(),
+ "--host",
+ &env.host_triple(),
+ "--stage",
+ "2",
+ "library/std",
+ ])
+ .env("RUST_BACKTRACE", "full");
+ Self { cmd, metrics_path }
+ }
+
+ pub fn dist(env: &dyn Environment, dist_args: &[String]) -> Self {
+ let metrics_path = env.build_root().join("build").join("metrics.json");
+ let cmd = cmd(&dist_args.iter().map(|arg| arg.as_str()).collect::<Vec<_>>())
+ .env("RUST_BACKTRACE", "full");
+ Self { cmd, metrics_path }
+ }
+
+ pub fn llvm_pgo_instrument(mut self, profile_dir: &Utf8Path) -> Self {
+ self.cmd = self
+ .cmd
+ .arg("--llvm-profile-generate")
+ .env("LLVM_PROFILE_DIR", profile_dir.join("prof-%p").as_str());
+ self
+ }
+
+ pub fn llvm_pgo_optimize(mut self, profile: &LlvmPGOProfile) -> Self {
+ self.cmd = self.cmd.arg("--llvm-profile-use").arg(profile.0.as_str());
+ self
+ }
+
+ pub fn rustc_pgo_instrument(mut self, profile_dir: &Utf8Path) -> Self {
+ self.cmd = self.cmd.arg("--rust-profile-generate").arg(profile_dir.as_str());
+ self
+ }
+
+ pub fn without_llvm_lto(mut self) -> Self {
+ self.cmd = self
+ .cmd
+ .arg("--set")
+ .arg("llvm.thin-lto=false")
+ .arg("--set")
+ .arg("llvm.link-shared=true");
+ self
+ }
+
+ pub fn rustc_pgo_optimize(mut self, profile: &RustcPGOProfile) -> Self {
+ self.cmd = self.cmd.arg("--rust-profile-use").arg(profile.0.as_str());
+ self
+ }
+
+ pub fn with_llvm_bolt_ldflags(mut self) -> Self {
+ self.cmd = self.cmd.arg("--set").arg("llvm.ldflags=-Wl,-q");
+ self
+ }
+
+ pub fn with_bolt_profile(mut self, profile: LlvmBoltProfile) -> Self {
+ self.cmd = self.cmd.arg("--reproducible-artifact").arg(profile.0.as_str());
+ self
+ }
+
+ /// Do not rebuild rustc, and use a previously built rustc sysroot instead.
+ pub fn avoid_rustc_rebuild(mut self) -> Self {
+ self.cmd = self.cmd.arg("--keep-stage").arg("0").arg("--keep-stage").arg("1");
+ self
+ }
+
+ pub fn run(self, timer: &mut TimerSection) -> anyhow::Result<()> {
+ self.cmd.run()?;
+ let metrics = load_metrics(&self.metrics_path)?;
+ record_metrics(&metrics, timer);
+ Ok(())
+ }
+}
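
For reference, a hypothetical usage sketch of the `cmd` builder defined above; the command, working directory, and log path are made up for illustration. It chains a working directory, an environment variable, and stdout redirection before spawning the process.

// Assumes the cmd helper above is in scope; paths are illustrative only.
fn example() -> anyhow::Result<()> {
    cmd(&["cargo", "build", "--release"])
        .workdir(camino::Utf8Path::new("/tmp/project"))
        .env("RUSTC_BOOTSTRAP", "1")
        .redirect_output(camino::Utf8PathBuf::from("/tmp/build.log"))
        .run()
}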
diff --git a/src/tools/opt-dist/src/main.rs b/src/tools/opt-dist/src/main.rs
new file mode 100644
index 000000000..8ab19674d
--- /dev/null
+++ b/src/tools/opt-dist/src/main.rs
@@ -0,0 +1,215 @@
+use crate::bolt::{bolt_optimize, with_bolt_instrumented};
+use anyhow::Context;
+use log::LevelFilter;
+use utils::io;
+
+use crate::environment::{create_environment, Environment};
+use crate::exec::Bootstrap;
+use crate::tests::run_tests;
+use crate::timer::Timer;
+use crate::training::{gather_llvm_bolt_profiles, gather_llvm_profiles, gather_rustc_profiles};
+use crate::utils::io::reset_directory;
+use crate::utils::{
+ clear_llvm_files, format_env_variables, print_binary_sizes, print_free_disk_space,
+ with_log_group,
+};
+
+mod bolt;
+mod environment;
+mod exec;
+mod metrics;
+mod tests;
+mod timer;
+mod training;
+mod utils;
+
+fn is_try_build() -> bool {
+ std::env::var("DIST_TRY_BUILD").unwrap_or_else(|_| "0".to_string()) != "0"
+}
+
+fn execute_pipeline(
+ env: &dyn Environment,
+ timer: &mut Timer,
+ dist_args: Vec<String>,
+) -> anyhow::Result<()> {
+ reset_directory(&env.opt_artifacts())?;
+
+ with_log_group("Building rustc-perf", || env.prepare_rustc_perf())?;
+
+ // Stage 1: Build PGO instrumented rustc
+ // We use a normal build of LLVM, because gathering PGO profiles for LLVM and `rustc` at the
+ // same time can cause issues, as the host and in-tree LLVM versions can diverge.
+ let rustc_pgo_profile = timer.section("Stage 1 (Rustc PGO)", |stage| {
+ let rustc_profile_dir_root = env.opt_artifacts().join("rustc-pgo");
+
+ stage.section("Build PGO instrumented rustc and LLVM", |section| {
+ let mut builder = Bootstrap::build(env).rustc_pgo_instrument(&rustc_profile_dir_root);
+
+ if env.supports_shared_llvm() {
+ // This first LLVM that we build will be thrown away after this stage, and it
+ // doesn't really need LTO. Without LTO, it builds in ~1 minute thanks to sccache;
+ // with LTO it takes almost 10 minutes. It makes the follow-up Rustc PGO
+ // instrumented/optimized build a bit slower, but it seems to be worth it.
+ builder = builder.without_llvm_lto();
+ }
+
+ builder.run(section)
+ })?;
+
+ let profile = stage
+ .section("Gather profiles", |_| gather_rustc_profiles(env, &rustc_profile_dir_root))?;
+ print_free_disk_space()?;
+
+ stage.section("Build PGO optimized rustc", |section| {
+ Bootstrap::build(env).rustc_pgo_optimize(&profile).run(section)
+ })?;
+
+ Ok(profile)
+ })?;
+
+ // Stage 2: Gather LLVM PGO profiles
+ // Here we build a PGO instrumented LLVM, reusing the previously PGO optimized rustc.
+ // Then we use the instrumented LLVM to gather LLVM PGO profiles.
+ let llvm_pgo_profile = timer.section("Stage 2 (LLVM PGO)", |stage| {
+ // Remove the previous, uninstrumented build of LLVM.
+ clear_llvm_files(env)?;
+
+ let llvm_profile_dir_root = env.opt_artifacts().join("llvm-pgo");
+
+ stage.section("Build PGO instrumented LLVM", |section| {
+ Bootstrap::build(env)
+ .llvm_pgo_instrument(&llvm_profile_dir_root)
+ .avoid_rustc_rebuild()
+ .run(section)
+ })?;
+
+ let profile = stage
+ .section("Gather profiles", |_| gather_llvm_profiles(env, &llvm_profile_dir_root))?;
+
+ print_free_disk_space()?;
+
+ // Proactively delete the instrumented artifacts, to avoid using them by accident in
+ // follow-up stages.
+ clear_llvm_files(env)?;
+
+ Ok(profile)
+ })?;
+
+ let llvm_bolt_profile = if env.supports_bolt() {
+ // Stage 3: Build BOLT instrumented LLVM
+ // We build a PGO optimized LLVM in this step, then instrument it with BOLT and gather BOLT profiles.
+ // Note that we don't remove LLVM artifacts after this step, so that they are reused in the final dist build.
+ // BOLT instrumentation is performed "on-the-fly" when the LLVM library is copied to the sysroot of rustc,
+ // therefore the LLVM artifacts on disk are not "tainted" with BOLT instrumentation and they can be reused.
+ timer.section("Stage 3 (LLVM BOLT)", |stage| {
+ stage.section("Build PGO optimized LLVM", |stage| {
+ Bootstrap::build(env)
+ .with_llvm_bolt_ldflags()
+ .llvm_pgo_optimize(&llvm_pgo_profile)
+ .avoid_rustc_rebuild()
+ .run(stage)
+ })?;
+
+ // Find the path to the `libLLVM.so` file
+ let llvm_lib = io::find_file_in_dir(
+ &env.build_artifacts().join("stage2").join("lib"),
+ "libLLVM",
+ ".so",
+ )?;
+
+ // Instrument it and gather profiles
+ let profile = with_bolt_instrumented(&llvm_lib, || {
+ stage.section("Gather profiles", |_| gather_llvm_bolt_profiles(env))
+ })?;
+ print_free_disk_space()?;
+
+ // Now optimize the library with BOLT. The `libLLVM-XXX.so` library is actually hard-linked
+ // from several places, and this specific path (`llvm_lib`) will *not* be packaged into
+ // the final dist build. However, when BOLT optimizes an artifact, it does so *in-place*,
+ // therefore it will actually optimize all the hard links, which means that the final
+ // packaged `libLLVM.so` file *will* be BOLT optimized.
+ bolt_optimize(&llvm_lib, &profile).context("Could not optimize LLVM with BOLT")?;
+
+ // LLVM is not being cleared here, we want to use the BOLT-optimized LLVM
+ Ok(Some(profile))
+ })?
+ } else {
+ None
+ };
+
+ let mut dist = Bootstrap::dist(env, &dist_args)
+ .llvm_pgo_optimize(&llvm_pgo_profile)
+ .rustc_pgo_optimize(&rustc_pgo_profile)
+ .avoid_rustc_rebuild();
+
+ if let Some(llvm_bolt_profile) = llvm_bolt_profile {
+ dist = dist.with_bolt_profile(llvm_bolt_profile);
+ }
+
+ // Final stage: Assemble the dist artifacts
+ // The previous PGO optimized rustc build and PGO optimized LLVM builds should be reused.
+ timer.section("Stage 4 (final build)", |stage| dist.run(stage))?;
+
+ // After dist has finished, run a subset of the test suite on the optimized artifacts to discover
+ // possible regressions.
+ // The tests are not executed for try builds, which can be in various broken states, so we
+ // don't want to block them on test failures.
+ if !is_try_build() {
+ timer.section("Run tests", |_| run_tests(env))?;
+ }
+
+ Ok(())
+}
+
+fn main() -> anyhow::Result<()> {
+ // Make sure that we get backtraces for easier debugging in CI
+ std::env::set_var("RUST_BACKTRACE", "1");
+
+ env_logger::builder()
+ .filter_level(LevelFilter::Info)
+ .format_timestamp_millis()
+ .parse_default_env()
+ .init();
+
+ let mut build_args: Vec<String> = std::env::args().skip(1).collect();
+ println!("Running optimized build pipeline with args `{}`", build_args.join(" "));
+
+ with_log_group("Environment values", || {
+ println!("Environment values\n{}", format_env_variables());
+ });
+
+ with_log_group("Printing config.toml", || {
+ if let Ok(config) = std::fs::read_to_string("config.toml") {
+ println!("Contents of `config.toml`:\n{config}");
+ }
+ });
+
+ // Skip components that are not needed for try builds to speed them up
+ if is_try_build() {
+ log::info!("Skipping building of unimportant components for a try build");
+ for target in [
+ "rust-docs",
+ "rustc-docs",
+ "rust-docs-json",
+ "rust-analyzer",
+ "rustc-src",
+ "clippy",
+ "miri",
+ "rustfmt",
+ ] {
+ build_args.extend(["--skip".to_string(), target.to_string()]);
+ }
+ }
+
+ let mut timer = Timer::new();
+ let env = create_environment();
+
+ let result = execute_pipeline(env.as_ref(), &mut timer, build_args);
+ log::info!("Timer results\n{}", timer.format_stats());
+
+ print_free_disk_space()?;
+ result.context("Optimized build pipeline has failed")?;
+ print_binary_sizes(env.as_ref())?;
+
+ Ok(())
+}
diff --git a/src/tools/opt-dist/src/metrics.rs b/src/tools/opt-dist/src/metrics.rs
new file mode 100644
index 000000000..cabe07eda
--- /dev/null
+++ b/src/tools/opt-dist/src/metrics.rs
@@ -0,0 +1,106 @@
+use crate::timer::TimerSection;
+use build_helper::metrics::{JsonNode, JsonRoot};
+use camino::Utf8Path;
+use std::time::Duration;
+
+#[derive(Clone, Debug)]
+pub struct BuildStep {
+ r#type: String,
+ children: Vec<BuildStep>,
+ duration: Duration,
+}
+
+impl BuildStep {
+ pub fn find_all_by_type(&self, r#type: &str) -> Vec<&BuildStep> {
+ let mut result = Vec::new();
+ self.find_by_type(r#type, &mut result);
+ result
+ }
+ fn find_by_type<'a>(&'a self, r#type: &str, result: &mut Vec<&'a BuildStep>) {
+ if self.r#type == r#type {
+ result.push(self);
+ }
+ for child in &self.children {
+ child.find_by_type(r#type, result);
+ }
+ }
+}
+
+/// Loads the metrics of the most recent bootstrap execution from a metrics.json file.
+pub fn load_metrics(path: &Utf8Path) -> anyhow::Result<BuildStep> {
+ let content = std::fs::read(path.as_std_path())?;
+ let mut metrics = serde_json::from_slice::<JsonRoot>(&content)?;
+ let invocation = metrics
+ .invocations
+ .pop()
+ .ok_or_else(|| anyhow::anyhow!("No bootstrap invocation found in metrics file"))?;
+
+ fn parse(node: JsonNode) -> Option<BuildStep> {
+ match node {
+ JsonNode::RustbuildStep {
+ type_: kind,
+ children,
+ duration_excluding_children_sec,
+ ..
+ } => {
+ let children: Vec<_> = children.into_iter().filter_map(parse).collect();
+ let children_duration = children.iter().map(|c| c.duration).sum::<Duration>();
+ Some(BuildStep {
+ r#type: kind.to_string(),
+ children,
+ duration: children_duration
+ + Duration::from_secs_f64(duration_excluding_children_sec),
+ })
+ }
+ JsonNode::TestSuite(_) => None,
+ }
+ }
+
+ let duration = Duration::from_secs_f64(invocation.duration_including_children_sec);
+ let children: Vec<_> = invocation.children.into_iter().filter_map(parse).collect();
+ Ok(BuildStep { r#type: "root".to_string(), children, duration })
+}
+
+/// Logs the individual metrics in a table and adds Rustc and LLVM durations to the passed
+/// timer.
+pub fn record_metrics(metrics: &BuildStep, timer: &mut TimerSection) {
+ let llvm_steps = metrics.find_all_by_type("bootstrap::llvm::Llvm");
+ let llvm_duration: Duration = llvm_steps.into_iter().map(|s| s.duration).sum();
+
+ let rustc_steps = metrics.find_all_by_type("bootstrap::compile::Rustc");
+ let rustc_duration: Duration = rustc_steps.into_iter().map(|s| s.duration).sum();
+
+ // The LLVM step is part of the Rustc step
+ let rustc_duration = rustc_duration.saturating_sub(llvm_duration);
+
+ if !llvm_duration.is_zero() {
+ timer.add_duration("LLVM", llvm_duration);
+ }
+ if !rustc_duration.is_zero() {
+ timer.add_duration("Rustc", rustc_duration);
+ }
+
+ log_metrics(metrics);
+}
+
+fn log_metrics(metrics: &BuildStep) {
+ use std::fmt::Write;
+
+ let mut substeps: Vec<(u32, &BuildStep)> = Vec::new();
+
+ fn visit<'a>(step: &'a BuildStep, level: u32, substeps: &mut Vec<(u32, &'a BuildStep)>) {
+ substeps.push((level, step));
+ for child in &step.children {
+ visit(child, level + 1, substeps);
+ }
+ }
+
+ visit(metrics, 0, &mut substeps);
+
+ let mut output = String::new();
+ for (level, step) in substeps {
+ let label = format!("{}{}", ".".repeat(level as usize), step.r#type);
+ writeln!(output, "{label:<65}{:>8.2}s", step.duration.as_secs_f64()).unwrap();
+ }
+ log::info!("Build step durations\n{output}");
+}
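
Since the `bootstrap::llvm::Llvm` steps are nested inside the `bootstrap::compile::Rustc` steps, `record_metrics` subtracts the LLVM total from the Rustc total to avoid double counting. A tiny standalone sketch of that bookkeeping, with made-up durations:

use std::time::Duration;

fn main() {
    // Made-up example durations.
    let llvm_total = Duration::from_secs(900);
    let rustc_total_including_llvm = Duration::from_secs(2100);
    // LLVM is built as part of the Rustc step, so its time is subtracted once.
    let rustc_only = rustc_total_including_llvm.saturating_sub(llvm_total);
    assert_eq!(rustc_only, Duration::from_secs(1200));
    println!("LLVM: {llvm_total:?}, Rustc (excluding LLVM): {rustc_only:?}");
}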
diff --git a/src/tools/opt-dist/src/tests.rs b/src/tools/opt-dist/src/tests.rs
new file mode 100644
index 000000000..3dd1a3223
--- /dev/null
+++ b/src/tools/opt-dist/src/tests.rs
@@ -0,0 +1,114 @@
+use crate::environment::Environment;
+use crate::exec::cmd;
+use crate::utils::io::{copy_directory, find_file_in_dir, unpack_archive};
+use anyhow::Context;
+use camino::{Utf8Path, Utf8PathBuf};
+
+/// Run tests on optimized dist artifacts.
+pub fn run_tests(env: &dyn Environment) -> anyhow::Result<()> {
+ // After `dist` is executed, we extract its archived components into a sysroot directory,
+ // and then use that extracted rustc as a stage0 compiler.
+ // Then we run a subset of tests using that compiler, as a basic smoke test to check that
+ // the optimization pipeline hasn't broken anything.
+ let build_dir = env.build_root().join("build");
+ let dist_dir = build_dir.join("dist");
+ let unpacked_dist_dir = build_dir.join("unpacked-dist");
+ std::fs::create_dir_all(&unpacked_dist_dir)?;
+
+ let extract_dist_dir = |name: &str| -> anyhow::Result<Utf8PathBuf> {
+ unpack_archive(&dist_dir.join(format!("{name}.tar.xz")), &unpacked_dist_dir)?;
+ let extracted_path = unpacked_dist_dir.join(name);
+ assert!(extracted_path.is_dir());
+ Ok(extracted_path)
+ };
+ let host_triple = env.host_triple();
+ let version = find_dist_version(&dist_dir)?;
+
+ // Extract rustc, libstd, cargo and src archives to create the optimized sysroot
+ let rustc_dir = extract_dist_dir(&format!("rustc-{version}-{host_triple}"))?.join("rustc");
+ let libstd_dir = extract_dist_dir(&format!("rust-std-{version}-{host_triple}"))?
+ .join(format!("rust-std-{host_triple}"));
+ let cargo_dir = extract_dist_dir(&format!("cargo-{version}-{host_triple}"))?.join("cargo");
+ let extracted_src_dir = extract_dist_dir(&format!("rust-src-{version}"))?.join("rust-src");
+
+ // We need to manually copy libstd to the extracted rustc sysroot
+ copy_directory(
+ &libstd_dir.join("lib").join("rustlib").join(&host_triple).join("lib"),
+ &rustc_dir.join("lib").join("rustlib").join(&host_triple).join("lib"),
+ )?;
+
+ // Extract sources - they aren't in the `rustc-nightly-{host}` tarball, so we need to manually copy libstd
+ // sources to the extracted sysroot. We need sources available so that `-Zsimulate-remapped-rust-src-base`
+ // works correctly.
+ copy_directory(
+ &extracted_src_dir.join("lib").join("rustlib").join("src"),
+ &rustc_dir.join("lib").join("rustlib").join("src"),
+ )?;
+
+ let rustc_path = rustc_dir.join("bin").join(format!("rustc{}", env.executable_extension()));
+ assert!(rustc_path.is_file());
+ let cargo_path = cargo_dir.join("bin").join(format!("cargo{}", env.executable_extension()));
+ assert!(cargo_path.is_file());
+
+ // Specify the path to an LLVM config so that LLVM is not rebuilt.
+ // It doesn't really matter which LLVM config we choose, because no sysroot will be compiled.
+ let llvm_config = env
+ .build_artifacts()
+ .join("llvm")
+ .join("bin")
+ .join(format!("llvm-config{}", env.executable_extension()));
+ assert!(llvm_config.is_file());
+
+ let config_content = format!(
+ r#"profile = "user"
+changelog-seen = 2
+
+[build]
+rustc = "{rustc}"
+cargo = "{cargo}"
+
+[target.{host_triple}]
+llvm-config = "{llvm_config}"
+"#,
+ rustc = rustc_path.to_string().replace('\\', "/"),
+ cargo = cargo_path.to_string().replace('\\', "/"),
+ llvm_config = llvm_config.to_string().replace('\\', "/")
+ );
+ log::info!("Using following `config.toml` for running tests:\n{config_content}");
+
+ // Simulate a stage 0 compiler with the extracted optimized dist artifacts.
+ std::fs::write("config.toml", config_content)?;
+
+ let x_py = env.checkout_path().join("x.py");
+ let mut args = vec![
+ env.python_binary(),
+ x_py.as_str(),
+ "test",
+ "--stage",
+ "0",
+ "tests/assembly",
+ "tests/codegen",
+ "tests/codegen-units",
+ "tests/incremental",
+ "tests/mir-opt",
+ "tests/pretty",
+ "tests/run-pass-valgrind",
+ "tests/ui",
+ ];
+ for test_path in env.skipped_tests() {
+ args.extend(["--skip", test_path]);
+ }
+ cmd(&args).env("COMPILETEST_FORCE_STAGE0", "1").run().context("Cannot execute tests")
+}
+
+/// Tries to find the version of the dist artifacts (either nightly, beta, or 1.XY.Z).
+fn find_dist_version(directory: &Utf8Path) -> anyhow::Result<String> {
+ // Look up a known file with a unique prefix and extract the version from its filename
+ let archive = find_file_in_dir(directory, "reproducible-artifacts-", ".tar.xz")?
+ .file_name()
+ .unwrap()
+ .to_string();
+ let (version, _) =
+ archive.strip_prefix("reproducible-artifacts-").unwrap().split_once("-").unwrap();
+ Ok(version.to_string())
+}
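
The version is recovered purely from the name of the `reproducible-artifacts-*` tarball. A standalone sketch of that parsing, not taken from the patch, using a hypothetical file name:

fn main() {
    // Hypothetical dist artifact name; the real one is located via find_file_in_dir.
    let archive = "reproducible-artifacts-1.73.0-x86_64-unknown-linux-gnu.tar.xz";
    let (version, _rest) = archive
        .strip_prefix("reproducible-artifacts-")
        .unwrap()
        .split_once('-')
        .unwrap();
    assert_eq!(version, "1.73.0");
}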
diff --git a/src/tools/opt-dist/src/timer.rs b/src/tools/opt-dist/src/timer.rs
new file mode 100644
index 000000000..2b29ba8d5
--- /dev/null
+++ b/src/tools/opt-dist/src/timer.rs
@@ -0,0 +1,167 @@
+use std::ops::{Deref, DerefMut};
+use std::time::{Duration, SystemTime};
+
+pub struct Timer {
+ root: TimerSection,
+}
+
+impl Timer {
+ pub fn new() -> Self {
+ Timer { root: TimerSection::new(None) }
+ }
+
+ pub fn format_stats(&self) -> String {
+ use std::fmt::Write;
+
+ let mut items = Vec::new();
+ for (name, child) in &self.root.children {
+ match child {
+ SectionEntry::SubSection(section) => {
+ section.collect_levels(0, name, &mut items);
+ }
+ SectionEntry::Duration(duration) => items.push((0, name, *duration)),
+ }
+ }
+
+ let rows: Vec<(String, Duration)> = items
+ .into_iter()
+ .map(|(level, name, duration)| (format!("{}{name}:", " ".repeat(level)), duration))
+ .collect();
+
+ let total_duration = self.total_duration();
+ let total_duration_label = "Total duration:".to_string();
+
+ const SPACE_AFTER_LABEL: usize = 2;
+ let max_label_length = 16.max(rows.iter().map(|(label, _)| label.len()).max().unwrap_or(0))
+ + SPACE_AFTER_LABEL;
+
+ let table_width = max_label_length + 23;
+ let divider = "-".repeat(table_width);
+
+ let mut output = String::new();
+ writeln!(output, "{divider}").unwrap();
+ for (label, duration) in rows {
+ let pct = (duration.as_millis() as f64 / total_duration.as_millis() as f64) * 100.0;
+ let duration_fmt = format!("{:>12.2}s ({pct:>5.2}%)", duration.as_secs_f64());
+ writeln!(output, "{label:<0$} {duration_fmt}", max_label_length).unwrap();
+ }
+ output.push('\n');
+
+ let total_duration = Duration::new(total_duration.as_secs(), 0);
+ let total_duration = format!(
+ "{:>1$}",
+ humantime::format_duration(total_duration).to_string(),
+ table_width - total_duration_label.len()
+ );
+ writeln!(output, "{total_duration_label}{total_duration}").unwrap();
+
+ writeln!(output, "{divider}").unwrap();
+ output
+ }
+}
+
+impl Deref for Timer {
+ type Target = TimerSection;
+
+ fn deref(&self) -> &Self::Target {
+ &self.root
+ }
+}
+
+impl DerefMut for Timer {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.root
+ }
+}
+
+pub struct TimerSection {
+ name: Option<String>,
+ children: Vec<(String, SectionEntry)>,
+ duration_excluding_children: Duration,
+}
+
+impl TimerSection {
+ pub fn new(name: Option<String>) -> Self {
+ TimerSection {
+ name,
+ children: Default::default(),
+ duration_excluding_children: Duration::ZERO,
+ }
+ }
+
+ pub fn section<F: FnOnce(&mut TimerSection) -> anyhow::Result<R>, R>(
+ &mut self,
+ name: &str,
+ func: F,
+ ) -> anyhow::Result<R> {
+ let full_name = match &self.name {
+ Some(current_name) => {
+ format!("{current_name} > {name}")
+ }
+ None => name.to_string(),
+ };
+ log::info!("Section `{full_name}` starts");
+ let mut child = TimerSection {
+ name: Some(full_name.clone()),
+ children: Default::default(),
+ duration_excluding_children: Duration::ZERO,
+ };
+
+ let start = SystemTime::now();
+ let result = func(&mut child);
+ let duration = start.elapsed().unwrap();
+
+ let msg = match result {
+ Ok(_) => "OK",
+ Err(_) => "FAIL",
+ };
+
+ child.duration_excluding_children = duration.saturating_sub(child.total_duration());
+
+ log::info!("Section `{full_name}` ended: {msg} ({:.2}s)`", duration.as_secs_f64());
+ self.children.push((name.to_string(), SectionEntry::SubSection(child)));
+ result
+ }
+
+ pub fn add_duration(&mut self, name: &str, duration: Duration) {
+ self.children.push((name.to_string(), SectionEntry::Duration(duration)));
+ }
+
+ fn total_duration(&self) -> Duration {
+ self.duration_excluding_children
+ + self.children.iter().map(|(_, child)| child.total_duration()).sum::<Duration>()
+ }
+
+ fn collect_levels<'a>(
+ &'a self,
+ level: usize,
+ name: &'a str,
+ items: &mut Vec<(usize, &'a str, Duration)>,
+ ) {
+ items.push((level, name, self.total_duration()));
+ for (name, child) in &self.children {
+ match &child {
+ SectionEntry::Duration(duration) => {
+ items.push((level + 1, name, *duration));
+ }
+ SectionEntry::SubSection(section) => {
+ section.collect_levels(level + 1, name, items);
+ }
+ }
+ }
+ }
+}
+
+enum SectionEntry {
+ Duration(Duration),
+ SubSection(TimerSection),
+}
+
+impl SectionEntry {
+ fn total_duration(&self) -> Duration {
+ match self {
+ SectionEntry::Duration(duration) => *duration,
+ SectionEntry::SubSection(timer) => timer.total_duration(),
+ }
+ }
+}
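
A hypothetical usage sketch of the timer above: sections nest through the closure argument, and `format_stats` renders the collected durations as a table. The section names and the (empty) work inside are illustrative only.

// Assumes Timer/TimerSection from this module are in scope.
fn example() -> anyhow::Result<()> {
    let mut timer = Timer::new();
    timer.section("Build", |stage| {
        stage.section("Compile LLVM", |_| Ok(()))?;
        stage.section("Compile rustc", |_| Ok(()))
    })?;
    println!("{}", timer.format_stats());
    Ok(())
}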
diff --git a/src/tools/opt-dist/src/training.rs b/src/tools/opt-dist/src/training.rs
new file mode 100644
index 000000000..59c73fbd6
--- /dev/null
+++ b/src/tools/opt-dist/src/training.rs
@@ -0,0 +1,223 @@
+use crate::environment::Environment;
+use crate::exec::{cmd, CmdBuilder};
+use crate::utils::io::{count_files, delete_directory};
+use crate::utils::with_log_group;
+use anyhow::Context;
+use camino::{Utf8Path, Utf8PathBuf};
+use humansize::BINARY;
+
+const LLVM_PGO_CRATES: &[&str] = &[
+ "syn-1.0.89",
+ "cargo-0.60.0",
+ "serde-1.0.136",
+ "ripgrep-13.0.0",
+ "regex-1.5.5",
+ "clap-3.1.6",
+ "hyper-0.14.18",
+];
+
+const RUSTC_PGO_CRATES: &[&str] = &[
+ "externs",
+ "ctfe-stress-5",
+ "cargo-0.60.0",
+ "token-stream-stress",
+ "match-stress",
+ "tuple-stress",
+ "diesel-1.4.8",
+ "bitmaps-3.1.0",
+];
+
+const LLVM_BOLT_CRATES: &[&str] = LLVM_PGO_CRATES;
+
+fn init_compiler_benchmarks(
+ env: &dyn Environment,
+ profiles: &[&str],
+ scenarios: &[&str],
+ crates: &[&str],
+) -> CmdBuilder {
+ // Run rustc-perf benchmarks
+ // Benchmark using profile_local with eprintln, which essentially just means
+ // don't actually benchmark -- just make sure we run rustc a bunch of times.
+ cmd(&[
+ env.cargo_stage_0().as_str(),
+ "run",
+ "-p",
+ "collector",
+ "--bin",
+ "collector",
+ "--",
+ "profile_local",
+ "eprintln",
+ env.rustc_stage_2().as_str(),
+ "--id",
+ "Test",
+ "--cargo",
+ env.cargo_stage_0().as_str(),
+ "--profiles",
+ profiles.join(",").as_str(),
+ "--scenarios",
+ scenarios.join(",").as_str(),
+ "--include",
+ crates.join(",").as_str(),
+ ])
+ .env("RUST_LOG", "collector=debug")
+ .env("RUSTC", env.rustc_stage_0().as_str())
+ .env("RUSTC_BOOTSTRAP", "1")
+ .workdir(&env.rustc_perf_dir())
+}
+
+/// Describes which `llvm-profdata` binary should be used for merging PGO profiles.
+enum LlvmProfdata {
+ /// Use llvm-profdata from the host toolchain (i.e. from LLVM provided externally).
+ Host,
+ /// Use llvm-profdata from the target toolchain (i.e. from LLVM built from `src/llvm-project`).
+ Target,
+}
+
+fn merge_llvm_profiles(
+ env: &dyn Environment,
+ merged_path: &Utf8Path,
+ profile_dir: &Utf8Path,
+ profdata: LlvmProfdata,
+) -> anyhow::Result<()> {
+ let llvm_profdata = match profdata {
+ LlvmProfdata::Host => env.host_llvm_dir().join("bin/llvm-profdata"),
+ LlvmProfdata::Target => env
+ .build_artifacts()
+ .join("llvm")
+ .join("build")
+ .join(format!("bin/llvm-profdata{}", env.executable_extension())),
+ };
+
+ cmd(&[llvm_profdata.as_str(), "merge", "-o", merged_path.as_str(), profile_dir.as_str()])
+ .run()
+ .context("Cannot merge LLVM profiles")?;
+ Ok(())
+}
+
+fn log_profile_stats(
+ name: &str,
+ merged_profile: &Utf8Path,
+ profile_root: &Utf8Path,
+) -> anyhow::Result<()> {
+ log::info!("{name} PGO statistics");
+ log::info!(
+ "{merged_profile}: {}",
+ humansize::format_size(std::fs::metadata(merged_profile.as_std_path())?.len(), BINARY)
+ );
+ log::info!(
+ "{profile_root}: {}",
+ humansize::format_size(fs_extra::dir::get_size(profile_root.as_std_path())?, BINARY)
+ );
+ log::info!("Profile file count: {}", count_files(profile_root)?);
+ Ok(())
+}
+
+pub struct LlvmPGOProfile(pub Utf8PathBuf);
+
+pub fn gather_llvm_profiles(
+ env: &dyn Environment,
+ profile_root: &Utf8Path,
+) -> anyhow::Result<LlvmPGOProfile> {
+ log::info!("Running benchmarks with PGO instrumented LLVM");
+
+ with_log_group("Running benchmarks", || {
+ init_compiler_benchmarks(env, &["Debug", "Opt"], &["Full"], LLVM_PGO_CRATES)
+ .run()
+ .context("Cannot gather LLVM PGO profiles")
+ })?;
+
+ let merged_profile = env.opt_artifacts().join("llvm-pgo.profdata");
+ log::info!("Merging LLVM PGO profiles to {merged_profile}");
+
+ merge_llvm_profiles(env, &merged_profile, profile_root, LlvmProfdata::Host)?;
+ log_profile_stats("LLVM", &merged_profile, profile_root)?;
+
+ // We don't need the individual .profraw files now that they have been merged
+ // into a final .profdata
+ delete_directory(profile_root)?;
+
+ Ok(LlvmPGOProfile(merged_profile))
+}
+
+pub struct RustcPGOProfile(pub Utf8PathBuf);
+
+pub fn gather_rustc_profiles(
+ env: &dyn Environment,
+ profile_root: &Utf8Path,
+) -> anyhow::Result<RustcPGOProfile> {
+ log::info!("Running benchmarks with PGO instrumented rustc");
+
+ // The profile data is written into a single file that is repeatedly merged as each
+ // rustc invocation ends. Empirically, this can result in some profiling data being lost. That's
+ // why we override the profile path to include the PID. This will produce many more profiling
+ // files, but the resulting profile will produce a slightly faster rustc binary.
+ let profile_template = profile_root.join("default_%m_%p.profraw");
+
+ // Here we're profiling the `rustc` frontend, so we also include `Check`.
+ // The benchmark set includes various stress tests that put the frontend under pressure.
+ with_log_group("Running benchmarks", || {
+ init_compiler_benchmarks(env, &["Check", "Debug", "Opt"], &["All"], RUSTC_PGO_CRATES)
+ .env("LLVM_PROFILE_FILE", profile_template.as_str())
+ .run()
+ .context("Cannot gather rustc PGO profiles")
+ })?;
+
+ let merged_profile = env.opt_artifacts().join("rustc-pgo.profdata");
+ log::info!("Merging Rustc PGO profiles to {merged_profile}");
+
+ merge_llvm_profiles(env, &merged_profile, profile_root, LlvmProfdata::Target)?;
+ log_profile_stats("Rustc", &merged_profile, profile_root)?;
+
+ // We don't need the individual .profraw files now that they have been merged
+ // into a final .profdata
+ delete_directory(profile_root)?;
+
+ Ok(RustcPGOProfile(merged_profile))
+}
+
+pub struct LlvmBoltProfile(pub Utf8PathBuf);
+
+pub fn gather_llvm_bolt_profiles(env: &dyn Environment) -> anyhow::Result<LlvmBoltProfile> {
+ log::info!("Running benchmarks with BOLT instrumented LLVM");
+
+ with_log_group("Running benchmarks", || {
+ init_compiler_benchmarks(env, &["Check", "Debug", "Opt"], &["Full"], LLVM_BOLT_CRATES)
+ .run()
+ .context("Cannot gather LLVM BOLT profiles")
+ })?;
+
+ let merged_profile = env.opt_artifacts().join("llvm-bolt.profdata");
+ let profile_root = Utf8PathBuf::from("/tmp/prof.fdata");
+ log::info!("Merging LLVM BOLT profiles to {merged_profile}");
+
+ let profiles: Vec<_> =
+ glob::glob(&format!("{profile_root}*"))?.into_iter().collect::<Result<Vec<_>, _>>()?;
+
+ let mut merge_args = vec!["merge-fdata"];
+ merge_args.extend(profiles.iter().map(|p| p.to_str().unwrap()));
+
+ with_log_group("Merging BOLT profiles", || {
+ cmd(&merge_args)
+ .redirect_output(merged_profile.clone())
+ .run()
+ .context("Cannot merge BOLT profiles")
+ })?;
+
+ log::info!("LLVM BOLT statistics");
+ log::info!(
+ "{merged_profile}: {}",
+ humansize::format_size(std::fs::metadata(merged_profile.as_std_path())?.len(), BINARY)
+ );
+
+ let size = profiles
+ .iter()
+ .map(|p| std::fs::metadata(p).map(|metadata| metadata.len()))
+ .collect::<Result<Vec<_>, _>>()?
+ .into_iter()
+ .sum::<u64>();
+ log::info!("{profile_root}: {}", humansize::format_size(size, BINARY));
+ log::info!("Profile file count: {}", profiles.len());
+
+ Ok(LlvmBoltProfile(merged_profile))
+}
diff --git a/src/tools/opt-dist/src/utils/io.rs b/src/tools/opt-dist/src/utils/io.rs
new file mode 100644
index 000000000..8bd516fa3
--- /dev/null
+++ b/src/tools/opt-dist/src/utils/io.rs
@@ -0,0 +1,88 @@
+use anyhow::Context;
+use camino::{Utf8Path, Utf8PathBuf};
+use fs_extra::dir::CopyOptions;
+use std::fs::File;
+use std::path::Path;
+
+/// Delete and re-create the directory.
+pub fn reset_directory(path: &Utf8Path) -> anyhow::Result<()> {
+ log::info!("Resetting directory {path}");
+ let _ = std::fs::remove_dir(path);
+ std::fs::create_dir_all(path)?;
+ Ok(())
+}
+
+pub fn copy_directory(src: &Utf8Path, dst: &Utf8Path) -> anyhow::Result<()> {
+ log::info!("Copying directory {src} to {dst}");
+ fs_extra::dir::copy(src, dst, &CopyOptions::default().copy_inside(true))?;
+ Ok(())
+}
+
+pub fn copy_file<S: AsRef<Path>, D: AsRef<Path>>(src: S, dst: D) -> anyhow::Result<()> {
+ log::info!("Copying file {} to {}", src.as_ref().display(), dst.as_ref().display());
+ std::fs::copy(src.as_ref(), dst.as_ref())?;
+ Ok(())
+}
+
+#[allow(unused)]
+pub fn move_directory(src: &Utf8Path, dst: &Utf8Path) -> anyhow::Result<()> {
+ log::info!("Moving directory {src} to {dst}");
+ fs_extra::dir::move_dir(src, dst, &CopyOptions::default().content_only(true))?;
+ Ok(())
+}
+
+/// Counts all children of a directory (non-recursively).
+pub fn count_files(dir: &Utf8Path) -> anyhow::Result<u64> {
+ Ok(std::fs::read_dir(dir)?.count() as u64)
+}
+
+pub fn delete_directory(path: &Utf8Path) -> anyhow::Result<()> {
+ log::info!("Deleting directory `{path}`");
+ std::fs::remove_dir_all(path.as_std_path())
+ .context(format!("Cannot remove directory {path}"))?;
+ Ok(())
+}
+
+pub fn unpack_archive(path: &Utf8Path, dest_dir: &Utf8Path) -> anyhow::Result<()> {
+ log::info!("Unpacking directory `{path}` into `{dest_dir}`");
+
+ assert!(path.as_str().ends_with(".tar.xz"));
+ let file = File::open(path.as_std_path())?;
+ let file = xz::read::XzDecoder::new(file);
+ let mut archive = tar::Archive::new(file);
+ archive.unpack(dest_dir.as_std_path())?;
+ Ok(())
+}
+
+/// Returns paths in the given `dir` (non-recursively), optionally with the given `suffix`.
+/// The `suffix` should contain the leading dot.
+pub fn get_files_from_dir(
+ dir: &Utf8Path,
+ suffix: Option<&str>,
+) -> anyhow::Result<Vec<Utf8PathBuf>> {
+ let path = format!("{dir}/*{}", suffix.unwrap_or(""));
+
+ Ok(glob::glob(&path)?
+ .into_iter()
+ .map(|p| p.map(|p| Utf8PathBuf::from_path_buf(p).unwrap()))
+ .collect::<Result<Vec<_>, _>>()?)
+}
+
+/// Finds a single file in the specified `directory` with the given `prefix` and `suffix`.
+pub fn find_file_in_dir(
+ directory: &Utf8Path,
+ prefix: &str,
+ suffix: &str,
+) -> anyhow::Result<Utf8PathBuf> {
+ let files = glob::glob(&format!("{directory}/{prefix}*{suffix}"))?
+ .into_iter()
+ .collect::<Result<Vec<_>, _>>()?;
+ match files.len() {
+ 0 => Err(anyhow::anyhow!("No file with prefix {prefix} found in {directory}")),
+ 1 => Ok(Utf8PathBuf::from_path_buf(files[0].clone()).unwrap()),
+ _ => Err(anyhow::anyhow!(
+ "More than one file with prefix {prefix} found in {directory}: {:?}",
+ files
+ )),
+ }
+}
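
A hypothetical usage sketch of `find_file_in_dir` above, mirroring how `tests.rs` locates dist tarballs; the directory and prefix are illustrative only.

// Assumes find_file_in_dir from this module is in scope.
fn example() -> anyhow::Result<()> {
    let dist_dir = camino::Utf8Path::new("/checkout/obj/build/dist");
    let tarball = find_file_in_dir(dist_dir, "rustc-", ".tar.xz")?;
    println!("Found dist tarball: {tarball}");
    Ok(())
}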
diff --git a/src/tools/opt-dist/src/utils/mod.rs b/src/tools/opt-dist/src/utils/mod.rs
new file mode 100644
index 000000000..9a3df15e3
--- /dev/null
+++ b/src/tools/opt-dist/src/utils/mod.rs
@@ -0,0 +1,75 @@
+pub mod io;
+
+use crate::environment::Environment;
+use crate::utils::io::{delete_directory, get_files_from_dir};
+use humansize::{format_size, BINARY};
+use sysinfo::{DiskExt, RefreshKind, System, SystemExt};
+
+pub fn format_env_variables() -> String {
+ let vars = std::env::vars().map(|(key, value)| format!("{key}={value}")).collect::<Vec<_>>();
+ vars.join("\n")
+}
+
+pub fn print_free_disk_space() -> anyhow::Result<()> {
+ let sys = System::new_with_specifics(RefreshKind::default().with_disks_list().with_disks());
+ let available_space: u64 = sys.disks().iter().map(|d| d.available_space()).sum();
+ let total_space: u64 = sys.disks().iter().map(|d| d.total_space()).sum();
+ let used_space = total_space - available_space;
+
+ log::info!(
+ "Free disk space: {} out of total {} ({:.2}% used)",
+ humansize::format_size(available_space, BINARY),
+ humansize::format_size(total_space, BINARY),
+ (used_space as f64 / total_space as f64) * 100.0
+ );
+ Ok(())
+}
+
+pub fn print_binary_sizes(env: &dyn Environment) -> anyhow::Result<()> {
+ use std::fmt::Write;
+
+ let root = env.build_artifacts().join("stage2");
+
+ let mut files = get_files_from_dir(&root.join("bin"), None)?;
+ files.extend(get_files_from_dir(&root.join("lib"), Some(".so"))?);
+ files.sort_unstable();
+
+ let mut output = String::new();
+ for file in files {
+ let size = std::fs::metadata(file.as_std_path())?.len();
+ let size_formatted = format_size(size, BINARY);
+ let name = format!("{}:", file.file_name().unwrap());
+ writeln!(output, "{name:<50}{size_formatted:>10}")?;
+ }
+
+ log::info!("Rustc artifact size\n{output}");
+
+ Ok(())
+}
+
+pub fn clear_llvm_files(env: &dyn Environment) -> anyhow::Result<()> {
+ // Bootstrap currently doesn't support rebuilding LLVM when PGO options
+ // change (or any other llvm-related options); so just clear out the relevant
+ // directories ourselves.
+ log::info!("Clearing LLVM build files");
+ delete_directory(&env.build_artifacts().join("llvm"))?;
+ delete_directory(&env.build_artifacts().join("lld"))?;
+ Ok(())
+}
+
+/// Wraps all output produced within the `func` closure in a CI output group, if we're running in
+/// CI.
+pub fn with_log_group<F: FnOnce() -> R, R>(group: &str, func: F) -> R {
+ if is_in_ci() {
+ println!("::group::{group}");
+ let result = func();
+ println!("::endgroup::");
+ result
+ } else {
+ func()
+ }
+}
+
+fn is_in_ci() -> bool {
+ std::env::var("GITHUB_ACTIONS").is_ok()
+}
diff --git a/src/tools/rust-analyzer/.editorconfig b/src/tools/rust-analyzer/.editorconfig
index 314f79d3f..f00ade5fd 100644
--- a/src/tools/rust-analyzer/.editorconfig
+++ b/src/tools/rust-analyzer/.editorconfig
@@ -7,13 +7,10 @@ trim_trailing_whitespace = true
end_of_line = lf
insert_final_newline = true
indent_style = space
-
-[*.{rs,toml}]
indent_size = 4
-[*.ts]
-indent_size = 4
-[*.js]
-indent_size = 4
-[*.json]
-indent_size = 4
+[*.md]
+indent_size = 2
+
+[*.{yml, yaml}]
+indent_size = 2
diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
index 13cb25f7b..a2b263cf2 100644
--- a/src/tools/rust-analyzer/Cargo.lock
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -28,9 +28,9 @@ dependencies = [
[[package]]
name = "anyhow"
-version = "1.0.70"
+version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4"
+checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
[[package]]
name = "anymap"
@@ -46,9 +46,9 @@ checksum = "e2d098ff73c1ca148721f37baad5ea6a465a13f9573aba8641fbbbae8164a54e"
[[package]]
name = "arrayvec"
-version = "0.7.2"
+version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
+checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
[[package]]
name = "atty"
@@ -77,8 +77,8 @@ dependencies = [
"cc",
"cfg-if",
"libc",
- "miniz_oxide",
- "object",
+ "miniz_oxide 0.6.2",
+ "object 0.30.4",
"rustc-demangle",
]
@@ -87,7 +87,7 @@ name = "base-db"
version = "0.0.0"
dependencies = [
"cfg",
- "la-arena",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"profile",
"rustc-hash",
"salsa",
@@ -107,9 +107,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
-version = "2.1.0"
+version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c70beb79cbb5ce9c4f8e20849978f34225931f665bb49efa6982875a4d5facb3"
+checksum = "6dbe3c979c178231552ecba20214a8272df4e09f232a87aef4320cf06539aded"
[[package]]
name = "byteorder"
@@ -177,21 +177,21 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chalk-derive"
-version = "0.91.0"
+version = "0.92.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c59178fded594fe78c47b841520e5a4399d00fe15fffee19b945958a878cd02d"
+checksum = "ff5053a8a42dbff5279a82423946fc56dc1253b76cf211b2b3c14b3aad4e1281"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.15",
+ "syn 2.0.18",
"synstructure",
]
[[package]]
name = "chalk-ir"
-version = "0.91.0"
+version = "0.92.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8824be92876823b828d551bb792f79eb1f69c69d1948abf69fccbf84e448e57b"
+checksum = "8a56de2146a8ed0fcd54f4bd50db852f1de4eac9e1efe568494f106c21b77d2a"
dependencies = [
"bitflags 1.3.2",
"chalk-derive",
@@ -200,9 +200,9 @@ dependencies = [
[[package]]
name = "chalk-recursive"
-version = "0.91.0"
+version = "0.92.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e110d1260809c238072d1c8ef84060e39983e8ea9d4c6f74b19b0ebbf8904dc"
+checksum = "5cc09e6e9531f3544989ef89b189e80fbc7ad9e2f73f1c5e03ddc9ffb0527463"
dependencies = [
"chalk-derive",
"chalk-ir",
@@ -213,14 +213,14 @@ dependencies = [
[[package]]
name = "chalk-solve"
-version = "0.91.0"
+version = "0.92.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "12200b19abf4b0633095f7bd099f3ef609d314754b6adb358c68cc04d10589e5"
+checksum = "b392e02b4c81ec76d3748da839fc70a5539b83d27c9030668463d34d5110b860"
dependencies = [
"chalk-derive",
"chalk-ir",
"ena",
- "indexmap",
+ "indexmap 1.9.3",
"itertools",
"petgraph",
"rustc-hash",
@@ -286,22 +286,22 @@ dependencies = [
[[package]]
name = "crossbeam-epoch"
-version = "0.9.14"
+version = "0.9.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695"
+checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7"
dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
- "memoffset",
+ "memoffset 0.9.0",
"scopeguard",
]
[[package]]
name = "crossbeam-utils"
-version = "0.8.15"
+version = "0.8.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b"
+checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294"
dependencies = [
"cfg-if",
]
@@ -313,7 +313,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc"
dependencies = [
"cfg-if",
- "hashbrown",
+ "hashbrown 0.12.3",
"lock_api",
"once_cell",
"parking_lot_core 0.9.6",
@@ -321,13 +321,13 @@ dependencies = [
[[package]]
name = "derive_arbitrary"
-version = "1.3.0"
+version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f3cdeb9ec472d588e539a818b2dee436825730da08ad0017c4b1a17676bdc8b7"
+checksum = "53e0efad4403bfc52dc201159c4b842a246a14b98c64b55dfd0f2d89729dfeb8"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.18",
]
[[package]]
@@ -364,6 +364,12 @@ dependencies = [
]
[[package]]
+name = "equivalent"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1"
+
+[[package]]
name = "expect-test"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -382,7 +388,7 @@ dependencies = [
"cfg-if",
"libc",
"redox_syscall",
- "windows-sys",
+ "windows-sys 0.42.0",
]
[[package]]
@@ -393,12 +399,12 @@ checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d"
[[package]]
name = "flate2"
-version = "1.0.25"
+version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
+checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743"
dependencies = [
"crc32fast",
- "miniz_oxide",
+ "miniz_oxide 0.7.1",
]
[[package]]
@@ -419,9 +425,9 @@ dependencies = [
[[package]]
name = "form_urlencoded"
-version = "1.1.0"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8"
+checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"
dependencies = [
"percent-encoding",
]
@@ -443,9 +449,9 @@ checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a"
[[package]]
name = "gimli"
-version = "0.27.2"
+version = "0.27.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4"
+checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e"
[[package]]
name = "hashbrown"
@@ -454,6 +460,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
+name = "hashbrown"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
+
+[[package]]
name = "heck"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -509,7 +521,7 @@ dependencies = [
"anymap",
"arrayvec",
"base-db",
- "bitflags 2.1.0",
+ "bitflags 2.3.2",
"cfg",
"cov-mark",
"dashmap",
@@ -517,14 +529,14 @@ dependencies = [
"either",
"expect-test",
"fst",
- "hashbrown",
+ "hashbrown 0.12.3",
"hir-expand",
"hkalbasi-rustc-ap-rustc_abi",
"hkalbasi-rustc-ap-rustc_index",
- "indexmap",
+ "indexmap 2.0.0",
"intern",
"itertools",
- "la-arena",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit",
"mbe",
"once_cell",
@@ -548,10 +560,10 @@ dependencies = [
"cov-mark",
"either",
"expect-test",
- "hashbrown",
+ "hashbrown 0.12.3",
"intern",
"itertools",
- "la-arena",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit",
"mbe",
"profile",
@@ -570,7 +582,7 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
- "bitflags 2.1.0",
+ "bitflags 2.3.2",
"chalk-derive",
"chalk-ir",
"chalk-recursive",
@@ -584,10 +596,11 @@ dependencies = [
"hkalbasi-rustc-ap-rustc_index",
"intern",
"itertools",
- "la-arena",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit",
"nohash-hasher",
"once_cell",
+ "oorandom",
"profile",
"project-model",
"rustc-hash",
@@ -626,11 +639,11 @@ dependencies = [
[[package]]
name = "home"
-version = "0.5.4"
+version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408"
+checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb"
dependencies = [
- "winapi",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -715,10 +728,10 @@ dependencies = [
"expect-test",
"fst",
"hir",
- "indexmap",
+ "indexmap 2.0.0",
"itertools",
"limit",
- "line-index",
+ "line-index 0.1.0-pre.1",
"memchr",
"nohash-hasher",
"once_cell",
@@ -748,6 +761,7 @@ dependencies = [
"hir",
"ide-db",
"itertools",
+ "once_cell",
"profile",
"serde_json",
"sourcegen",
@@ -777,9 +791,9 @@ dependencies = [
[[package]]
name = "idna"
-version = "0.3.0"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6"
+checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c"
dependencies = [
"unicode-bidi",
"unicode-normalization",
@@ -792,7 +806,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
- "hashbrown",
+ "hashbrown 0.12.3",
+]
+
+[[package]]
+name = "indexmap"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d"
+dependencies = [
+ "equivalent",
+ "hashbrown 0.14.0",
]
[[package]]
@@ -829,7 +853,7 @@ name = "intern"
version = "0.0.0"
dependencies = [
"dashmap",
- "hashbrown",
+ "hashbrown 0.12.3",
"once_cell",
"rustc-hash",
"triomphe",
@@ -878,7 +902,13 @@ dependencies = [
[[package]]
name = "la-arena"
-version = "0.3.0"
+version = "0.3.1"
+
+[[package]]
+name = "la-arena"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3752f229dcc5a481d60f385fa479ff46818033d881d2d801aa27dffcfb5e8306"
[[package]]
name = "lazy_static"
@@ -888,25 +918,25 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.141"
+version = "0.2.146"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3304a64d199bb964be99741b7a14d26972741915b3649639149b2479bb46f4b5"
+checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b"
[[package]]
name = "libloading"
-version = "0.7.4"
+version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
+checksum = "d580318f95776505201b28cf98eb1fa5e4be3b689633ba6a3e6cd880ff22d8cb"
dependencies = [
"cfg-if",
- "winapi",
+ "windows-sys 0.48.0",
]
[[package]]
name = "libmimalloc-sys"
-version = "0.1.32"
+version = "0.1.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43a558e3d911bc3c7bfc8c78bc580b404d6e51c1cefbf656e176a94b49b0df40"
+checksum = "f4ac0e912c8ef1b735e92369695618dc5b1819f5a7bf3f167301a3ba1cea515e"
dependencies = [
"cc",
"libc",
@@ -919,16 +949,43 @@ version = "0.0.0"
[[package]]
name = "line-index"
version = "0.1.0-pre.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2cad96769710c1745e11d4f940a8ff36000ade4bbada4285b001cb8aa2f745ce"
dependencies = [
"nohash-hasher",
"text-size",
]
[[package]]
+name = "line-index"
+version = "0.1.0"
+dependencies = [
+ "nohash-hasher",
+ "text-size",
+]
+
+[[package]]
+name = "load-cargo"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "crossbeam-channel",
+ "ide",
+ "ide-db",
+ "itertools",
+ "proc-macro-api",
+ "project-model",
+ "tracing",
+ "tt",
+ "vfs",
+ "vfs-notify",
+]
+
+[[package]]
name = "lock_api"
-version = "0.4.9"
+version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
+checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16"
dependencies = [
"autocfg",
"scopeguard",
@@ -936,16 +993,25 @@ dependencies = [
[[package]]
name = "log"
-version = "0.4.17"
+version = "0.4.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
+
+[[package]]
+name = "lsp-server"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+checksum = "3711e4d6f491dc9edc0f1df80e204f38206775ac92c1241e89b79229a850bc00"
dependencies = [
- "cfg-if",
+ "crossbeam-channel",
+ "log",
+ "serde",
+ "serde_json",
]
[[package]]
name = "lsp-server"
-version = "0.7.0"
+version = "0.7.2"
dependencies = [
"crossbeam-channel",
"log",
@@ -968,15 +1034,6 @@ dependencies = [
]
[[package]]
-name = "matchers"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
-dependencies = [
- "regex-automata",
-]
-
-[[package]]
name = "mbe"
version = "0.0.0"
dependencies = [
@@ -1016,10 +1073,19 @@ dependencies = [
]
[[package]]
+name = "memoffset"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
name = "mimalloc"
-version = "0.1.36"
+version = "0.1.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d88dad3f985ec267a3fcb7a1726f5cb1a7e8cad8b646e70a84f967210df23da"
+checksum = "4e2894987a3459f3ffb755608bd82188f8ed00d0ae077f1edea29c068d639d98"
dependencies = [
"libmimalloc-sys",
]
@@ -1034,6 +1100,15 @@ dependencies = [
]
[[package]]
+name = "miniz_oxide"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
+dependencies = [
+ "adler",
+]
+
+[[package]]
name = "mio"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1042,7 +1117,7 @@ dependencies = [
"libc",
"log",
"wasi",
- "windows-sys",
+ "windows-sys 0.42.0",
]
[[package]]
@@ -1051,7 +1126,7 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52ffbca2f655e33c08be35d87278e5b18b89550a37dbd598c20db92f6a471123"
dependencies = [
- "windows-sys",
+ "windows-sys 0.42.0",
]
[[package]]
@@ -1087,7 +1162,7 @@ dependencies = [
"libc",
"mio",
"walkdir",
- "windows-sys",
+ "windows-sys 0.42.0",
]
[[package]]
@@ -1112,18 +1187,27 @@ dependencies = [
[[package]]
name = "object"
-version = "0.30.3"
+version = "0.30.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439"
+checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "object"
+version = "0.32.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe"
dependencies = [
"memchr",
]
[[package]]
name = "once_cell"
-version = "1.17.1"
+version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
+checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "oorandom"
@@ -1182,7 +1266,7 @@ dependencies = [
"libc",
"redox_syscall",
"smallvec",
- "windows-sys",
+ "windows-sys 0.42.0",
]
[[package]]
@@ -1209,9 +1293,9 @@ version = "0.0.0"
[[package]]
name = "percent-encoding"
-version = "2.2.0"
+version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
+checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
[[package]]
name = "perf-event"
@@ -1239,7 +1323,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7"
dependencies = [
"fixedbitset",
- "indexmap",
+ "indexmap 1.9.3",
]
[[package]]
@@ -1253,7 +1337,7 @@ name = "proc-macro-api"
version = "0.0.0"
dependencies = [
"memmap2",
- "object",
+ "object 0.32.0",
"paths",
"profile",
"serde",
@@ -1273,7 +1357,7 @@ dependencies = [
"libloading",
"mbe",
"memmap2",
- "object",
+ "object 0.32.0",
"paths",
"proc-macro-api",
"proc-macro-test",
@@ -1317,7 +1401,7 @@ version = "0.0.0"
dependencies = [
"cfg-if",
"countme",
- "la-arena",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc",
"once_cell",
"perf-event",
@@ -1335,7 +1419,7 @@ dependencies = [
"cfg",
"expect-test",
"itertools",
- "la-arena",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"paths",
"profile",
"rustc-hash",
@@ -1370,9 +1454,9 @@ dependencies = [
[[package]]
name = "pulldown-cmark"
-version = "0.9.2"
+version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d9cc634bc78768157b5cbfe988ffcd1dcba95cd2b2f03a88316c08c6d00ed63"
+checksum = "77a1a2f1f0a7ecff9c31abbe177637be0e97a0aef46cf8738ece09327985d998"
dependencies = [
"bitflags 1.3.2",
"memchr",
@@ -1390,9 +1474,9 @@ dependencies = [
[[package]]
name = "quote"
-version = "1.0.26"
+version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
+checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
dependencies = [
"proc-macro2",
]
@@ -1439,38 +1523,14 @@ dependencies = [
]
[[package]]
-name = "regex"
-version = "1.7.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d"
-dependencies = [
- "regex-syntax",
-]
-
-[[package]]
-name = "regex-automata"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
-dependencies = [
- "regex-syntax",
-]
-
-[[package]]
-name = "regex-syntax"
-version = "0.6.29"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
-
-[[package]]
name = "rowan"
version = "0.15.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64449cfef9483a475ed56ae30e2da5ee96448789fb2aa240a04beb6a055078bf"
dependencies = [
"countme",
- "hashbrown",
- "memoffset",
+ "hashbrown 0.12.3",
+ "memoffset 0.8.0",
"rustc-hash",
"text-size",
]
@@ -1494,7 +1554,8 @@ dependencies = [
"ide-db",
"ide-ssr",
"itertools",
- "lsp-server",
+ "load-cargo",
+ "lsp-server 0.7.1",
"lsp-types",
"mbe",
"mimalloc",
@@ -1512,12 +1573,10 @@ dependencies = [
"scip",
"serde",
"serde_json",
- "serde_repr",
"sourcegen",
"stdx",
"syntax",
"test-utils",
- "thiserror",
"tikv-jemallocator",
"toolchain",
"tracing",
@@ -1525,7 +1584,6 @@ dependencies = [
"tracing-subscriber",
"tracing-tree",
"triomphe",
- "tt",
"vfs",
"vfs-notify",
"winapi",
@@ -1535,9 +1593,9 @@ dependencies = [
[[package]]
name = "rustc-demangle"
-version = "0.1.22"
+version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4a36c42d1873f9a77c53bde094f9664d9891bc604a45b4798fd2c389ed12e5b"
+checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
[[package]]
name = "rustc-hash"
@@ -1558,7 +1616,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b223dccb46c32753144d0b51290da7230bb4aedcd8379d6b4c9a474c18bf17a"
dependencies = [
"crossbeam-utils",
- "indexmap",
+ "indexmap 1.9.3",
"lock_api",
"log",
"oorandom",
@@ -1641,11 +1699,11 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.96"
+version = "1.0.97"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
+checksum = "bdf3bf93142acad5821c99197022e170842cdbc1c30482b98750c688c640842a"
dependencies = [
- "indexmap",
+ "indexmap 1.9.3",
"itoa",
"ryu",
"serde",
@@ -1653,13 +1711,13 @@ dependencies = [
[[package]]
name = "serde_repr"
-version = "0.1.11"
+version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "395627de918015623b32e7669714206363a7fc00382bf477e72c1f7533e8eafc"
+checksum = "bcec881020c684085e55a25f7fd888954d56609ef363479dc5a1305eb0d40cab"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.18",
]
[[package]]
@@ -1731,9 +1789,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.15"
+version = "2.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822"
+checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
dependencies = [
"proc-macro2",
"quote",
@@ -1748,7 +1806,7 @@ checksum = "285ba80e733fac80aa4270fbcdf83772a79b80aa35c97075320abfee4a915b06"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.15",
+ "syn 2.0.18",
"unicode-xid",
]
@@ -1759,7 +1817,7 @@ dependencies = [
"cov-mark",
"either",
"expect-test",
- "indexmap",
+ "indexmap 2.0.0",
"itertools",
"once_cell",
"parser",
@@ -1806,22 +1864,22 @@ checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"
[[package]]
name = "thiserror"
-version = "1.0.39"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a5ab016db510546d856297882807df8da66a16fb8c4101cb8b30054b0d5b2d9c"
+checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
-version = "1.0.39"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5420d42e90af0c38c3290abcca25b9b3bdf379fc9f55c528f53a269d9c9a267e"
+checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.18",
]
[[package]]
@@ -1867,9 +1925,9 @@ dependencies = [
[[package]]
name = "time"
-version = "0.3.20"
+version = "0.3.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890"
+checksum = "ea9e1b3cf1243ae005d9e74085d4d542f3125458f3a81af210d901dcd7411efd"
dependencies = [
"serde",
"time-core",
@@ -1877,9 +1935,9 @@ dependencies = [
[[package]]
name = "time-core"
-version = "0.1.0"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd"
+checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb"
[[package]]
name = "tinyvec"
@@ -1917,20 +1975,20 @@ dependencies = [
[[package]]
name = "tracing-attributes"
-version = "0.1.23"
+version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a"
+checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.18",
]
[[package]]
name = "tracing-core"
-version = "0.1.30"
+version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a"
+checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
dependencies = [
"once_cell",
"valuable",
@@ -1949,25 +2007,21 @@ dependencies = [
[[package]]
name = "tracing-subscriber"
-version = "0.3.16"
+version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70"
+checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77"
dependencies = [
- "matchers",
- "once_cell",
- "regex",
"sharded-slab",
"thread_local",
- "tracing",
"tracing-core",
"tracing-log",
]
[[package]]
name = "tracing-tree"
-version = "0.2.2"
+version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "758e983ab7c54fee18403994507e7f212b9005e957ce7984996fac8d11facedb"
+checksum = "4f9742d8df709837409dbb22aa25dd7769c260406f20ff48a2320b80a4a6aed0"
dependencies = [
"atty",
"nu-ansi-term",
@@ -2060,9 +2114,9 @@ checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460"
[[package]]
name = "unicode-ident"
-version = "1.0.8"
+version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4"
+checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
[[package]]
name = "unicode-normalization"
@@ -2087,9 +2141,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "url"
-version = "2.3.1"
+version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643"
+checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb"
dependencies = [
"form_urlencoded",
"idna",
@@ -2114,7 +2168,7 @@ name = "vfs"
version = "0.0.0"
dependencies = [
"fst",
- "indexmap",
+ "indexmap 2.0.0",
"nohash-hasher",
"paths",
"rustc-hash",
@@ -2187,13 +2241,37 @@ version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
dependencies = [
- "windows_aarch64_gnullvm",
- "windows_aarch64_msvc",
- "windows_i686_gnu",
- "windows_i686_msvc",
- "windows_x86_64_gnu",
- "windows_x86_64_gnullvm",
- "windows_x86_64_msvc",
+ "windows_aarch64_gnullvm 0.42.2",
+ "windows_aarch64_msvc 0.42.2",
+ "windows_i686_gnu 0.42.2",
+ "windows_i686_msvc 0.42.2",
+ "windows_x86_64_gnu 0.42.2",
+ "windows_x86_64_gnullvm 0.42.2",
+ "windows_x86_64_msvc 0.42.2",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5"
+dependencies = [
+ "windows_aarch64_gnullvm 0.48.0",
+ "windows_aarch64_msvc 0.48.0",
+ "windows_i686_gnu 0.48.0",
+ "windows_i686_msvc 0.48.0",
+ "windows_x86_64_gnu 0.48.0",
+ "windows_x86_64_gnullvm 0.48.0",
+ "windows_x86_64_msvc 0.48.0",
]
[[package]]
@@ -2203,42 +2281,84 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8"
[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
+
+[[package]]
name = "windows_aarch64_msvc"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43"
[[package]]
+name = "windows_aarch64_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
+
+[[package]]
name = "windows_i686_gnu"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f"
[[package]]
+name = "windows_i686_gnu"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
+
+[[package]]
name = "windows_i686_msvc"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060"
[[package]]
+name = "windows_i686_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
+
+[[package]]
name = "windows_x86_64_gnu"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36"
[[package]]
+name = "windows_x86_64_gnu"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
+
+[[package]]
name = "windows_x86_64_gnullvm"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3"
[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
+
+[[package]]
name = "windows_x86_64_msvc"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0"
[[package]]
+name = "windows_x86_64_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
+
+[[package]]
name = "write-json"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2289,9 +2409,9 @@ dependencies = [
[[package]]
name = "zip"
-version = "0.6.4"
+version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0445d0fbc924bb93539b4316c11afb121ea39296f99a3c4c9edad09e3658cdef"
+checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261"
dependencies = [
"byteorder",
"crc32fast",
diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
index 3050cf764..f6a50bfa6 100644
--- a/src/tools/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-test/imp"]
resolver = "2"
[workspace.package]
-rust-version = "1.66"
+rust-version = "1.70"
edition = "2021"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer team"]
@@ -35,6 +35,10 @@ debug = 0
# chalk-ir = { path = "../chalk/chalk-ir" }
# chalk-recursive = { path = "../chalk/chalk-recursive" }
# chalk-derive = { path = "../chalk/chalk-derive" }
+# line-index = { path = "lib/line-index" }
+# la-arena = { path = "lib/la-arena" }
+# lsp-server = { path = "lib/lsp-server" }
+
# ungrammar = { path = "../ungrammar" }
@@ -57,13 +61,13 @@ ide-diagnostics = { path = "./crates/ide-diagnostics", version = "0.0.0" }
ide-ssr = { path = "./crates/ide-ssr", version = "0.0.0" }
intern = { path = "./crates/intern", version = "0.0.0" }
limit = { path = "./crates/limit", version = "0.0.0" }
+load-cargo = { path = "./crates/load-cargo", version = "0.0.0" }
mbe = { path = "./crates/mbe", version = "0.0.0" }
parser = { path = "./crates/parser", version = "0.0.0" }
paths = { path = "./crates/paths", version = "0.0.0" }
proc-macro-api = { path = "./crates/proc-macro-api", version = "0.0.0" }
proc-macro-srv = { path = "./crates/proc-macro-srv", version = "0.0.0" }
proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" }
-proc-macro-test = { path = "./crates/proc-macro-test", version = "0.0.0" }
profile = { path = "./crates/profile", version = "0.0.0" }
project-model = { path = "./crates/project-model", version = "0.0.0" }
sourcegen = { path = "./crates/sourcegen", version = "0.0.0" }
@@ -75,7 +79,14 @@ toolchain = { path = "./crates/toolchain", version = "0.0.0" }
tt = { path = "./crates/tt", version = "0.0.0" }
vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
-line-index = { version = "0.1.0-pre.1", path = "./lib/line-index" }
+
+# local crates that aren't published to crates.io. These should not have versions.
+proc-macro-test = { path = "./crates/proc-macro-test" }
+
+# In-tree crates that are published separately and follow semver. See lib/README.md
+line-index = { version = "0.1.0-pre.1" }
+la-arena = { version = "0.3.1" }
+lsp-server = { version = "0.7.1" }
# non-local crates
smallvec = { version = "1.10.0", features = [
@@ -86,9 +97,10 @@ smallvec = { version = "1.10.0", features = [
smol_str = "0.2.0"
nohash-hasher = "0.2.0"
text-size = "1.1.0"
-# the following crates are pinned to prevent us from pulling in syn 2 until all our dependencies have moved
-serde = { version = "=1.0.156", features = ["derive"] }
-serde_json = "1.0.94"
+serde = { version = "1.0.156", features = ["derive"] }
+serde_json = "1.0.96"
triomphe = { version = "0.1.8", default-features = false, features = ["std"] }
+# can't upgrade due to dashmap depending on 0.12.3 currently
+hashbrown = { version = "0.12.3", features = ["inline-more"], default-features = false }
rustc_lexer = { version = "0.1.0", package = "ra-ap-rustc_lexer" }
diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
index 6001772c8..171c113a9 100644
--- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
@@ -17,7 +17,7 @@ rustc-hash = "1.1.0"
triomphe.workspace = true
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+la-arena.workspace = true
# local deps
cfg.workspace = true
diff --git a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
index d3abc3870..323ee4260 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
@@ -26,7 +26,7 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static {
let fixture = ChangeFixture::parse(ra_fixture);
let mut db = Self::default();
fixture.change.apply(&mut db);
- assert_eq!(fixture.files.len(), 1);
+ assert_eq!(fixture.files.len(), 1, "Multiple files found in the fixture");
(db, fixture.files[0])
}
@@ -102,6 +102,8 @@ pub struct ChangeFixture {
pub change: Change,
}
+const SOURCE_ROOT_PREFIX: &str = "/";
+
impl ChangeFixture {
pub fn parse(ra_fixture: &str) -> ChangeFixture {
Self::parse_with_proc_macros(ra_fixture, Vec::new())
@@ -131,7 +133,6 @@ impl ChangeFixture {
let mut file_set = FileSet::default();
let mut current_source_root_kind = SourceRootKind::Local;
- let source_root_prefix = "/".to_string();
let mut file_id = FileId(0);
let mut roots = Vec::new();
@@ -151,19 +152,23 @@ impl ChangeFixture {
entry.text.clone()
};
- let meta = FileMeta::from(entry);
- assert!(meta.path.starts_with(&source_root_prefix));
+ let meta = FileMeta::from_fixture(entry, current_source_root_kind);
+ assert!(meta.path.starts_with(SOURCE_ROOT_PREFIX));
if !meta.deps.is_empty() {
assert!(meta.krate.is_some(), "can't specify deps without naming the crate")
}
- if let Some(kind) = &meta.introduce_new_source_root {
- let root = match current_source_root_kind {
+ if let Some(kind) = meta.introduce_new_source_root {
+ assert!(
+ meta.krate.is_some(),
+ "new_source_root meta doesn't make sense without crate meta"
+ );
+ let prev_kind = mem::replace(&mut current_source_root_kind, kind);
+ let prev_root = match prev_kind {
SourceRootKind::Local => SourceRoot::new_local(mem::take(&mut file_set)),
SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)),
};
- roots.push(root);
- current_source_root_kind = *kind;
+ roots.push(prev_root);
}
if let Some((krate, origin, version)) = meta.krate {
@@ -185,7 +190,7 @@ impl ChangeFixture {
Some(toolchain),
);
let prev = crates.insert(crate_name.clone(), crate_id);
- assert!(prev.is_none());
+ assert!(prev.is_none(), "multiple crates with same name: {}", crate_name);
for dep in meta.deps {
let prelude = meta.extern_prelude.contains(&dep);
let dep = CrateName::normalize_dashes(&dep);
@@ -219,7 +224,7 @@ impl ChangeFixture {
false,
CrateOrigin::Local { repo: None, name: None },
default_target_data_layout
- .map(|x| x.into())
+ .map(|it| it.into())
.ok_or_else(|| "target_data_layout unset".into()),
Some(toolchain),
);
@@ -442,51 +447,74 @@ struct FileMeta {
target_data_layout: Option<String>,
}
-fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option<String>) {
- if let Some((a, b)) = crate_str.split_once('@') {
- let (version, origin) = match b.split_once(':') {
- Some(("CratesIo", data)) => match data.split_once(',') {
- Some((version, url)) => {
- (version, CrateOrigin::Local { repo: Some(url.to_owned()), name: None })
- }
- _ => panic!("Bad crates.io parameter: {data}"),
- },
- _ => panic!("Bad string for crate origin: {b}"),
- };
- (a.to_owned(), origin, Some(version.to_string()))
- } else {
- let crate_origin = match LangCrateOrigin::from(&*crate_str) {
- LangCrateOrigin::Other => CrateOrigin::Local { repo: None, name: None },
- origin => CrateOrigin::Lang(origin),
- };
- (crate_str, crate_origin, None)
- }
-}
-
-impl From<Fixture> for FileMeta {
- fn from(f: Fixture) -> FileMeta {
+impl FileMeta {
+ fn from_fixture(f: Fixture, current_source_root_kind: SourceRootKind) -> Self {
let mut cfg = CfgOptions::default();
- f.cfg_atoms.iter().for_each(|it| cfg.insert_atom(it.into()));
- f.cfg_key_values.iter().for_each(|(k, v)| cfg.insert_key_value(k.into(), v.into()));
+ for (k, v) in f.cfgs {
+ if let Some(v) = v {
+ cfg.insert_key_value(k.into(), v.into());
+ } else {
+ cfg.insert_atom(k.into());
+ }
+ }
+
+ let introduce_new_source_root = f.introduce_new_source_root.map(|kind| match &*kind {
+ "local" => SourceRootKind::Local,
+ "library" => SourceRootKind::Library,
+ invalid => panic!("invalid source root kind '{invalid}'"),
+ });
+ let current_source_root_kind =
+ introduce_new_source_root.unwrap_or(current_source_root_kind);
+
let deps = f.deps;
- FileMeta {
+ Self {
path: f.path,
- krate: f.krate.map(parse_crate),
+ krate: f.krate.map(|it| parse_crate(it, current_source_root_kind, f.library)),
extern_prelude: f.extern_prelude.unwrap_or_else(|| deps.clone()),
deps,
cfg,
- edition: f.edition.as_ref().map_or(Edition::CURRENT, |v| Edition::from_str(v).unwrap()),
+ edition: f.edition.map_or(Edition::CURRENT, |v| Edition::from_str(&v).unwrap()),
env: f.env.into_iter().collect(),
- introduce_new_source_root: f.introduce_new_source_root.map(|kind| match &*kind {
- "local" => SourceRootKind::Local,
- "library" => SourceRootKind::Library,
- invalid => panic!("invalid source root kind '{invalid}'"),
- }),
+ introduce_new_source_root,
target_data_layout: f.target_data_layout,
}
}
}
+fn parse_crate(
+ crate_str: String,
+ current_source_root_kind: SourceRootKind,
+ explicit_non_workspace_member: bool,
+) -> (String, CrateOrigin, Option<String>) {
+ // syntax:
+ // "my_awesome_crate"
+ // "my_awesome_crate@0.0.1,http://example.com"
+ let (name, repo, version) = if let Some((name, remain)) = crate_str.split_once('@') {
+ let (version, repo) =
+ remain.split_once(',').expect("crate meta: found '@' without version and url");
+ (name.to_owned(), Some(repo.to_owned()), Some(version.to_owned()))
+ } else {
+ (crate_str, None, None)
+ };
+
+ let non_workspace_member = explicit_non_workspace_member
+ || matches!(current_source_root_kind, SourceRootKind::Library);
+
+ let origin = match LangCrateOrigin::from(&*name) {
+ LangCrateOrigin::Other => {
+ let name = name.clone();
+ if non_workspace_member {
+ CrateOrigin::Library { repo, name }
+ } else {
+ CrateOrigin::Local { repo, name: Some(name) }
+ }
+ }
+ origin => CrateOrigin::Lang(origin),
+ };
+
+ (name, origin, version)
+}
+
// Identity mapping
#[derive(Debug)]
struct IdentityProcMacroExpander;
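Note: the reworked parse_crate above accepts fixture crate metadata either as a bare name or as "name@version,repo". A minimal standalone sketch of that parsing step, assuming the same split_once-based format (illustrative code only, not the fixture API itself):

    // Split a fixture crate description of the form "name@version,repo".
    // A bare name yields no version and no repository.
    fn split_crate_meta(s: &str) -> (&str, Option<&str>, Option<&str>) {
        match s.split_once('@') {
            Some((name, rest)) => {
                let (version, repo) = rest
                    .split_once(',')
                    .expect("crate meta: found '@' without version and url");
                (name, Some(version), Some(repo))
            }
            None => (s, None, None),
        }
    }

    fn main() {
        assert_eq!(
            split_crate_meta("my_awesome_crate@0.0.1,http://example.com"),
            ("my_awesome_crate", Some("0.0.1"), Some("http://example.com"))
        );
        assert_eq!(split_crate_meta("core"), ("core", None, None));
    }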
diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs
index f2e523675..c47799f13 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/input.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs
@@ -138,12 +138,12 @@ impl ops::Deref for CrateName {
}
}
-/// Origin of the crates. It is used in emitting monikers.
+/// Origin of the crates.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum CrateOrigin {
- /// Crates that are from the rustc workspace
+ /// Crates that are from the rustc workspace.
Rustc { name: String },
- /// Crates that are workspace members,
+ /// Crates that are workspace members.
Local { repo: Option<String>, name: Option<String> },
/// Crates that are non member libraries.
Library { repo: Option<String>, name: String },
diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
index 0880bc239..ed3808972 100644
--- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
@@ -18,13 +18,13 @@ rustc-hash = "1.1.0"
tt.workspace = true
[dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.4.1"
oorandom = "11.1.3"
# We depend on both individually instead of using `features = ["derive"]` to microoptimize the
# build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr`
# supports `arbitrary`. This way, we avoid feature unification.
-arbitrary = "1.2.2"
-derive_arbitrary = "1.2.2"
+arbitrary = "1.3.0"
+derive_arbitrary = "1.3.1"
# local deps
mbe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
index 495119d55..183b9b7d2 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
@@ -69,7 +69,7 @@ impl CfgOptions {
}
pub fn get_cfg_keys(&self) -> impl Iterator<Item = &SmolStr> {
- self.enabled.iter().map(|x| match x {
+ self.enabled.iter().map(|it| match it {
CfgAtom::Flag(key) => key,
CfgAtom::KeyValue { key, .. } => key,
})
@@ -79,7 +79,7 @@ impl CfgOptions {
&'a self,
cfg_key: &'a str,
) -> impl Iterator<Item = &'a SmolStr> + 'a {
- self.enabled.iter().filter_map(move |x| match x {
+ self.enabled.iter().filter_map(move |it| match it {
CfgAtom::KeyValue { key, value } if cfg_key == key => Some(value),
_ => None,
})
diff --git a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
index 3f6671b1c..e7f7adc78 100644
--- a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
@@ -12,9 +12,9 @@ rust-version.workspace = true
doctest = false
[dependencies]
-crossbeam-channel = "0.5.5"
+crossbeam-channel = "0.5.8"
tracing = "0.1.37"
-cargo_metadata = "0.15.0"
+cargo_metadata = "0.15.4"
rustc-hash = "1.1.0"
serde_json.workspace = true
serde.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
index 83c705164..30307deb7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
@@ -21,14 +21,14 @@ dashmap = { version = "=5.4.0", features = ["raw-api"] }
drop_bomb = "0.1.5"
either = "1.7.0"
fst = { version = "0.4.7", default-features = false }
-hashbrown = { version = "0.12.1", default-features = false }
-indexmap = "1.9.1"
+indexmap = "2.0.0"
itertools = "0.10.5"
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+la-arena.workspace = true
once_cell = "1.17.0"
rustc-hash = "1.1.0"
-smallvec.workspace = true
tracing = "0.1.35"
+smallvec.workspace = true
+hashbrown.workspace = true
triomphe.workspace = true
rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
index bab3bbc23..fae071118 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
@@ -137,13 +137,16 @@ impl Attrs {
let cfg_options = &crate_graph[krate].cfg_options;
- let Some(variant) = enum_.variants.clone().filter(|variant| {
- let attrs = item_tree.attrs(db, krate, (*variant).into());
- attrs.is_cfg_enabled(cfg_options)
- })
- .zip(0u32..)
- .find(|(_variant, idx)| it.local_id == Idx::from_raw(RawIdx::from(*idx)))
- .map(|(variant, _idx)| variant)
+ let Some(variant) = enum_
+ .variants
+ .clone()
+ .filter(|variant| {
+ let attrs = item_tree.attrs(db, krate, (*variant).into());
+ attrs.is_cfg_enabled(cfg_options)
+ })
+ .zip(0u32..)
+ .find(|(_variant, idx)| it.local_id == Idx::from_raw(RawIdx::from(*idx)))
+ .map(|(variant, _idx)| variant)
else {
return Arc::new(res);
};
@@ -272,6 +275,25 @@ impl Attrs {
self.by_key("proc_macro_derive").exists()
}
+ pub fn is_test(&self) -> bool {
+ self.iter().any(|it| {
+ it.path()
+ .segments()
+ .iter()
+ .rev()
+ .zip(["core", "prelude", "v1", "test"].iter().rev())
+ .all(|it| it.0.as_str() == Some(it.1))
+ })
+ }
+
+ pub fn is_ignore(&self) -> bool {
+ self.by_key("ignore").exists()
+ }
+
+ pub fn is_bench(&self) -> bool {
+ self.by_key("bench").exists()
+ }
+
pub fn is_unstable(&self) -> bool {
self.by_key("unstable").exists()
}
@@ -282,7 +304,7 @@ use std::slice::Iter as SliceIter;
pub enum DocAtom {
/// eg. `#[doc(hidden)]`
Flag(SmolStr),
- /// eg. `#[doc(alias = "x")]`
+ /// eg. `#[doc(alias = "it")]`
///
/// Note that a key can have multiple values that are all considered "active" at the same time.
/// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`.
@@ -462,6 +484,8 @@ impl AttrsWithOwner {
}
},
AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),
+ AttrDefId::ExternCrateId(it) => attrs_from_item_tree_loc(db, it),
+ AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it),
};
let attrs = raw_attrs.filter(db.upcast(), def.krate(db));
@@ -546,6 +570,8 @@ impl AttrsWithOwner {
.map(|source| ast::AnyHasAttrs::new(source[id.local_id].clone())),
},
AttrDefId::ExternBlockId(id) => any_has_attrs(db, id),
+ AttrDefId::ExternCrateId(id) => any_has_attrs(db, id),
+ AttrDefId::UseId(id) => any_has_attrs(db, id),
};
AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs))
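Note: the new Attrs::is_test helper above recognises the expanded #[test] attribute by checking whether the attribute path ends with core::prelude::v1::test. A small self-contained sketch of that reversed-zip suffix check (hypothetical helper, not part of rust-analyzer's API):

    // An attribute path counts as `test` when its segments are a suffix of
    // `core::prelude::v1::test`, e.g. `test`, `v1::test`, or the full path.
    fn is_test_path(segments: &[&str]) -> bool {
        segments
            .iter()
            .rev()
            .zip(["core", "prelude", "v1", "test"].iter().rev())
            .all(|(seg, expected)| seg == expected)
    }

    fn main() {
        assert!(is_test_path(&["test"]));
        assert!(is_test_path(&["core", "prelude", "v1", "test"]));
        assert!(!is_test_path(&["my_crate", "test_helper"]));
    }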
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
index 94dc39b11..f8d492d0e 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
@@ -273,10 +273,10 @@ impl Body {
pub fn is_binding_upvar(&self, binding: BindingId, relative_to: ExprId) -> bool {
match self.binding_owners.get(&binding) {
- Some(x) => {
+ Some(it) => {
// We assign expression ids in a way that outer closures will receive
// a lower id
- x.into_raw() < relative_to.into_raw()
+ it.into_raw() < relative_to.into_raw()
}
None => true,
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
index b375ec63a..3853a6ab3 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
@@ -297,11 +297,11 @@ impl ExprCollector<'_> {
let (result_expr_id, prev_binding_owner) =
this.initialize_binding_owner(syntax_ptr);
let inner_expr = this.collect_block(e);
- let x = this.db.intern_anonymous_const(ConstBlockLoc {
+ let it = this.db.intern_anonymous_const(ConstBlockLoc {
parent: this.owner,
root: inner_expr,
});
- this.body.exprs[result_expr_id] = Expr::Const(x);
+ this.body.exprs[result_expr_id] = Expr::Const(it);
this.current_binding_owner = prev_binding_owner;
result_expr_id
})
@@ -313,21 +313,15 @@ impl ExprCollector<'_> {
let body = self.collect_labelled_block_opt(label, e.loop_body());
self.alloc_expr(Expr::Loop { body, label }, syntax_ptr)
}
- ast::Expr::WhileExpr(e) => {
- let label = e.label().map(|label| self.collect_label(label));
- let body = self.collect_labelled_block_opt(label, e.loop_body());
- let condition = self.collect_expr_opt(e.condition());
-
- self.alloc_expr(Expr::While { condition, body, label }, syntax_ptr)
- }
+ ast::Expr::WhileExpr(e) => self.collect_while_loop(syntax_ptr, e),
ast::Expr::ForExpr(e) => self.collect_for_loop(syntax_ptr, e),
ast::Expr::CallExpr(e) => {
let is_rustc_box = {
let attrs = e.attrs();
- attrs.filter_map(|x| x.as_simple_atom()).any(|x| x == "rustc_box")
+ attrs.filter_map(|it| it.as_simple_atom()).any(|it| it == "rustc_box")
};
if is_rustc_box {
- let expr = self.collect_expr_opt(e.arg_list().and_then(|x| x.args().next()));
+ let expr = self.collect_expr_opt(e.arg_list().and_then(|it| it.args().next()));
self.alloc_expr(Expr::Box { expr }, syntax_ptr)
} else {
let callee = self.collect_expr_opt(e.expr());
@@ -731,6 +725,32 @@ impl ExprCollector<'_> {
expr_id
}
+ /// Desugar `ast::WhileExpr` from: `[opt_ident]: while <cond> <body>` into:
+ /// ```ignore (pseudo-rust)
+ /// [opt_ident]: loop {
+ /// if <cond> {
+ /// <body>
+ /// }
+ /// else {
+ /// break;
+ /// }
+ /// }
+ /// ```
+ /// FIXME: Rustc wraps the condition in a construct equivalent to `{ let _t = <cond>; _t }`
+ /// to preserve drop semantics. We should probably do the same in future.
+ fn collect_while_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::WhileExpr) -> ExprId {
+ let label = e.label().map(|label| self.collect_label(label));
+ let body = self.collect_labelled_block_opt(label, e.loop_body());
+ let condition = self.collect_expr_opt(e.condition());
+ let break_expr =
+ self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr.clone());
+ let if_expr = self.alloc_expr(
+ Expr::If { condition, then_branch: body, else_branch: Some(break_expr) },
+ syntax_ptr.clone(),
+ );
+ self.alloc_expr(Expr::Loop { body: if_expr, label }, syntax_ptr)
+ }
+
/// Desugar `ast::ForExpr` from: `[opt_ident]: for <pat> in <head> <body>` into:
/// ```ignore (pseudo-rust)
/// match IntoIterator::into_iter(<head>) {
@@ -781,7 +801,7 @@ impl ExprCollector<'_> {
pat: self.alloc_pat_desugared(some_pat),
guard: None,
expr: self.with_opt_labeled_rib(label, |this| {
- this.collect_expr_opt(e.loop_body().map(|x| x.into()))
+ this.collect_expr_opt(e.loop_body().map(|it| it.into()))
}),
};
let iter_name = Name::generate_new_name();
@@ -874,10 +894,10 @@ impl ExprCollector<'_> {
}),
guard: None,
expr: {
- let x = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr.clone());
+ let it = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr.clone());
let callee = self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr.clone());
let result = self.alloc_expr(
- Expr::Call { callee, args: Box::new([x]), is_assignee_expr: false },
+ Expr::Call { callee, args: Box::new([it]), is_assignee_expr: false },
syntax_ptr.clone(),
);
self.alloc_expr(
@@ -893,15 +913,14 @@ impl ExprCollector<'_> {
self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr)
}
- fn collect_macro_call<F, T, U>(
+ fn collect_macro_call<T, U>(
&mut self,
mcall: ast::MacroCall,
syntax_ptr: AstPtr<ast::MacroCall>,
record_diagnostics: bool,
- collector: F,
+ collector: impl FnOnce(&mut Self, Option<T>) -> U,
) -> U
where
- F: FnOnce(&mut Self, Option<T>) -> U,
T: ast::AstNode,
{
// File containing the macro call. Expansion errors will be attached here.
@@ -1240,12 +1259,12 @@ impl ExprCollector<'_> {
pats.push(self.collect_pat(first, binding_list));
binding_list.reject_new = true;
for rest in it {
- for (_, x) in binding_list.is_used.iter_mut() {
- *x = false;
+ for (_, it) in binding_list.is_used.iter_mut() {
+ *it = false;
}
pats.push(self.collect_pat(rest, binding_list));
- for (&id, &x) in binding_list.is_used.iter() {
- if !x {
+ for (&id, &is_used) in binding_list.is_used.iter() {
+ if !is_used {
self.body.bindings[id].problems =
Some(BindingProblems::NotBoundAcrossAll);
}
@@ -1352,9 +1371,9 @@ impl ExprCollector<'_> {
// FIXME: implement in a way that also builds source map and calculates assoc resolutions in type inference.
ast::Pat::RangePat(p) => {
let mut range_part_lower = |p: Option<ast::Pat>| {
- p.and_then(|x| match &x {
- ast::Pat::LiteralPat(x) => {
- Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(x)?.0)))
+ p.and_then(|it| match &it {
+ ast::Pat::LiteralPat(it) => {
+ Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(it)?.0)))
}
ast::Pat::IdentPat(p) => {
let name =
@@ -1451,9 +1470,7 @@ impl ExprCollector<'_> {
&self,
lifetime: Option<ast::Lifetime>,
) -> Result<Option<LabelId>, BodyDiagnostic> {
- let Some(lifetime) = lifetime else {
- return Ok(None)
- };
+ let Some(lifetime) = lifetime else { return Ok(None) };
let name = Name::new_lifetime(&lifetime);
for (rib_idx, rib) in self.label_ribs.iter().enumerate().rev() {
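Note: the collect_while_loop helper added above desugars while loops during body lowering, as its doc comment describes. In surface Rust terms the rewrite looks roughly like this (a plain illustration of the shape, not the HIR that rust-analyzer actually builds):

    fn count_down(mut n: u32) -> u32 {
        let mut steps = 0;
        // Surface form: `while n > 0 { n -= 1; steps += 1; }`
        // Desugared form: a `loop` whose body is an `if`/`else break`.
        loop {
            if n > 0 {
                n -= 1;
                steps += 1;
            } else {
                break;
            }
        }
        steps
    }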
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
index cd6df0e63..5d71abe37 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
@@ -105,7 +105,7 @@ struct Printer<'a> {
needs_indent: bool,
}
-impl<'a> Write for Printer<'a> {
+impl Write for Printer<'_> {
fn write_str(&mut self, s: &str) -> fmt::Result {
for line in s.split_inclusive('\n') {
if self.needs_indent {
@@ -125,7 +125,7 @@ impl<'a> Write for Printer<'a> {
}
}
-impl<'a> Printer<'a> {
+impl Printer<'_> {
fn indented(&mut self, f: impl FnOnce(&mut Self)) {
self.indent_level += 1;
wln!(self);
@@ -178,14 +178,6 @@ impl<'a> Printer<'a> {
w!(self, "loop ");
self.print_expr(*body);
}
- Expr::While { condition, body, label } => {
- if let Some(lbl) = label {
- w!(self, "{}: ", self.body[*lbl].name.display(self.db));
- }
- w!(self, "while ");
- self.print_expr(*condition);
- self.print_expr(*body);
- }
Expr::Call { callee, args, is_assignee_expr: _ } => {
self.print_expr(*callee);
w!(self, "(");
@@ -634,7 +626,7 @@ impl<'a> Printer<'a> {
match literal {
Literal::String(it) => w!(self, "{:?}", it),
Literal::ByteString(it) => w!(self, "\"{}\"", it.escape_ascii()),
- Literal::CString(it) => w!(self, "\"{}\\0\"", it),
+ Literal::CString(it) => w!(self, "\"{}\\0\"", it.escape_ascii()),
Literal::Char(it) => w!(self, "'{}'", it.escape_debug()),
Literal::Bool(it) => w!(self, "{}", it),
Literal::Int(i, suffix) => {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
index 69741c445..2a90a09f2 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
@@ -228,11 +228,6 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
scopes.set_scope(expr, scope);
compute_block_scopes(statements, *tail, body, scopes, &mut scope);
}
- Expr::While { condition, body: body_expr, label } => {
- let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
- compute_expr_scopes(*condition, body, scopes, &mut scope);
- compute_expr_scopes(*body_expr, body, scopes, &mut scope);
- }
Expr::Loop { body: body_expr, label } => {
let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
compute_expr_scopes(*body_expr, body, scopes, &mut scope);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
index edee2c7ff..d55820116 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
@@ -3,12 +3,12 @@ mod block;
use base_db::{fixture::WithFixture, SourceDatabase};
use expect_test::Expect;
-use crate::ModuleDefId;
+use crate::{test_db::TestDB, ModuleDefId};
use super::*;
fn lower(ra_fixture: &str) -> Arc<Body> {
- let db = crate::test_db::TestDB::with_files(ra_fixture);
+ let db = TestDB::with_files(ra_fixture);
let krate = db.crate_graph().iter().next().unwrap();
let def_map = db.crate_def_map(krate);
@@ -25,15 +25,15 @@ fn lower(ra_fixture: &str) -> Arc<Body> {
db.body(fn_def.unwrap().into())
}
-fn block_def_map_at(ra_fixture: &str) -> String {
- let (db, position) = crate::test_db::TestDB::with_position(ra_fixture);
+fn def_map_at(ra_fixture: &str) -> String {
+ let (db, position) = TestDB::with_position(ra_fixture);
let module = db.module_at_position(position);
module.def_map(&db).dump(&db)
}
fn check_block_scopes_at(ra_fixture: &str, expect: Expect) {
- let (db, position) = crate::test_db::TestDB::with_position(ra_fixture);
+ let (db, position) = TestDB::with_position(ra_fixture);
let module = db.module_at_position(position);
let actual = module.def_map(&db).dump_block_scopes(&db);
@@ -41,7 +41,7 @@ fn check_block_scopes_at(ra_fixture: &str, expect: Expect) {
}
fn check_at(ra_fixture: &str, expect: Expect) {
- let actual = block_def_map_at(ra_fixture);
+ let actual = def_map_at(ra_fixture);
expect.assert_eq(&actual);
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs
index 6e77744f2..4e015a7fb 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs
@@ -134,6 +134,47 @@ struct Struct {}
}
#[test]
+fn super_imports_2() {
+ check_at(
+ r#"
+fn outer() {
+ mod m {
+ struct ResolveMe {}
+ fn middle() {
+ mod m2 {
+ fn inner() {
+ use super::ResolveMe;
+ $0
+ }
+ }
+ }
+ }
+}
+"#,
+ expect![[r#"
+ block scope
+ ResolveMe: t
+
+ block scope
+ m2: t
+
+ block scope::m2
+ inner: v
+
+ block scope
+ m: t
+
+ block scope::m
+ ResolveMe: t
+ middle: v
+
+ crate
+ outer: v
+ "#]],
+ );
+}
+
+#[test]
fn nested_module_scoping() {
check_block_scopes_at(
r#"
@@ -156,6 +197,42 @@ fn f() {
}
#[test]
+fn self_imports() {
+ check_at(
+ r#"
+fn f() {
+ mod m {
+ struct ResolveMe {}
+ fn g() {
+ fn h() {
+ use self::ResolveMe;
+ $0
+ }
+ }
+ }
+}
+"#,
+ expect![[r#"
+ block scope
+ ResolveMe: t
+
+ block scope
+ h: v
+
+ block scope
+ m: t
+
+ block scope::m
+ ResolveMe: t
+ g: v
+
+ crate
+ f: v
+ "#]],
+ );
+}
+
+#[test]
fn legacy_macro_items() {
// Checks that legacy-scoped `macro_rules!` from parent namespaces are resolved and expanded
// correctly.
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
index bb79e28f2..4cfd318a4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
@@ -14,8 +14,8 @@ use crate::{
item_scope::ItemScope,
nameres::DefMap,
src::{HasChildSource, HasSource},
- AdtId, AssocItemId, DefWithBodyId, EnumId, EnumVariantId, FieldId, ImplId, Lookup, MacroId,
- ModuleDefId, ModuleId, TraitId, VariantId,
+ AdtId, AssocItemId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FieldId, ImplId,
+ Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId, VariantId,
};
pub trait ChildBySource {
@@ -91,6 +91,8 @@ impl ChildBySource for ItemScope {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
self.declarations().for_each(|item| add_module_def(db, res, file_id, item));
self.impls().for_each(|imp| add_impl(db, res, file_id, imp));
+ self.extern_crate_decls().for_each(|ext| add_extern_crate(db, res, file_id, ext));
+ self.use_decls().for_each(|ext| add_use(db, res, file_id, ext));
self.unnamed_consts().for_each(|konst| {
let loc = konst.lookup(db);
if loc.id.file_id() == file_id {
@@ -167,6 +169,23 @@ impl ChildBySource for ItemScope {
map[keys::IMPL].insert(loc.source(db).value, imp)
}
}
+ fn add_extern_crate(
+ db: &dyn DefDatabase,
+ map: &mut DynMap,
+ file_id: HirFileId,
+ ext: ExternCrateId,
+ ) {
+ let loc = ext.lookup(db);
+ if loc.id.file_id() == file_id {
+ map[keys::EXTERN_CRATE].insert(loc.source(db).value, ext)
+ }
+ }
+ fn add_use(db: &dyn DefDatabase, map: &mut DynMap, file_id: HirFileId, ext: UseId) {
+ let loc = ext.lookup(db);
+ if loc.id.file_id() == file_id {
+ map[keys::USE].insert(loc.source(db).value, ext)
+ }
+ }
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
index 40e6a4308..91db68058 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
@@ -2,6 +2,7 @@
pub mod adt;
+use base_db::CrateId;
use hir_expand::{
name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefKind,
};
@@ -24,11 +25,12 @@ use crate::{
proc_macro::{parse_macro_name_and_helper_attrs, ProcMacroKind},
DefMap, MacroSubNs,
},
+ path::ImportAlias,
type_ref::{TraitRef, TypeBound, TypeRef},
visibility::RawVisibility,
- AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId,
- Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId, ProcMacroId,
- StaticId, TraitAliasId, TraitId, TypeAliasId, TypeAliasLoc,
+ AssocItemId, AstIdWithPath, ConstId, ConstLoc, ExternCrateId, FunctionId, FunctionLoc,
+ HasModule, ImplId, Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId,
+ ProcMacroId, StaticId, TraitAliasId, TraitId, TypeAliasId, TypeAliasLoc,
};
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -424,6 +426,7 @@ impl MacroRulesData {
Arc::new(MacroRulesData { name: makro.name.clone(), macro_export })
}
}
+
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ProcMacroData {
pub name: Name,
@@ -461,6 +464,42 @@ impl ProcMacroData {
}
#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ExternCrateDeclData {
+ pub name: Name,
+ pub alias: Option<ImportAlias>,
+ pub visibility: RawVisibility,
+ pub crate_id: Option<CrateId>,
+}
+
+impl ExternCrateDeclData {
+ pub(crate) fn extern_crate_decl_data_query(
+ db: &dyn DefDatabase,
+ extern_crate: ExternCrateId,
+ ) -> Arc<ExternCrateDeclData> {
+ let loc = extern_crate.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let extern_crate = &item_tree[loc.id.value];
+
+ let name = extern_crate.name.clone();
+ let crate_id = if name == hir_expand::name![self] {
+ Some(loc.container.krate())
+ } else {
+ db.crate_def_map(loc.container.krate())
+ .extern_prelude()
+ .find(|&(prelude_name, ..)| *prelude_name == name)
+ .map(|(_, root)| root.krate())
+ };
+
+ Arc::new(Self {
+ name: extern_crate.name.clone(),
+ visibility: item_tree[extern_crate.visibility].clone(),
+ alias: extern_crate.alias.clone(),
+ crate_id,
+ })
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ConstData {
/// `None` for `const _: () = ();`
pub name: Option<Name>,
@@ -573,7 +612,7 @@ impl<'a> AssocItemCollector<'a> {
if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
self.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id.local_id,
- InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast()),
+ InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).erase()),
attrs.cfg().unwrap(),
self.expander.cfg_options().clone(),
));
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
index 6db5abccc..c8df3f3f9 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
@@ -18,7 +18,6 @@ use triomphe::Arc;
use crate::{
builtin_type::{BuiltinInt, BuiltinUint},
db::DefDatabase,
- expander::CfgExpander,
item_tree::{AttrOwner, Field, FieldAstId, Fields, ItemTree, ModItem, RawVisibilityId},
lang_item::LangItem,
lower::LowerCtx,
@@ -29,8 +28,8 @@ use crate::{
tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree},
type_ref::TypeRef,
visibility::RawVisibility,
- EnumId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId, UnionId,
- VariantId,
+ EnumId, EnumLoc, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId,
+ UnionId, VariantId,
};
/// Note that we use `StructData` for unions as well!
@@ -76,6 +75,7 @@ pub struct EnumData {
pub struct EnumVariantData {
pub name: Name,
pub variant_data: Arc<VariantData>,
+ pub tree_id: la_arena::Idx<crate::item_tree::Variant>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -147,6 +147,7 @@ fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> {
}
"C" => ReprFlags::IS_C,
"transparent" => ReprFlags::IS_TRANSPARENT,
+ "simd" => ReprFlags::IS_SIMD,
repr => {
if let Some(builtin) = BuiltinInt::from_suffix(repr)
.map(Either::Left)
@@ -325,11 +326,12 @@ impl EnumData {
variants.alloc(EnumVariantData {
name: var.name.clone(),
variant_data: Arc::new(var_data),
+ tree_id,
});
} else {
diagnostics.push(DefDiagnostic::unconfigured_code(
loc.container.local_id,
- InFile::new(loc.id.file_id(), var.ast_id.upcast()),
+ InFile::new(loc.id.file_id(), var.ast_id.erase()),
attrs.cfg().unwrap(),
cfg_options.clone(),
))
@@ -367,9 +369,10 @@ impl HasChildSource<LocalEnumVariantId> for EnumId {
&self,
db: &dyn DefDatabase,
) -> InFile<ArenaMap<LocalEnumVariantId, Self::Value>> {
- let src = self.lookup(db).source(db);
+ let loc = &self.lookup(db);
+ let src = loc.source(db);
let mut trace = Trace::new_for_map();
- lower_enum(db, &mut trace, &src, self.lookup(db).container);
+ lower_enum(db, &mut trace, &src, loc);
src.with_value(trace.into_map())
}
}
@@ -378,31 +381,58 @@ fn lower_enum(
db: &dyn DefDatabase,
trace: &mut Trace<EnumVariantData, ast::Variant>,
ast: &InFile<ast::Enum>,
- module_id: ModuleId,
+ loc: &EnumLoc,
) {
- let expander = CfgExpander::new(db, ast.file_id, module_id.krate);
+ let item_tree = loc.id.item_tree(db);
+ let krate = loc.container.krate;
+
+ let item_tree_variants = item_tree[loc.id.value].variants.clone();
+
+ let cfg_options = &db.crate_graph()[krate].cfg_options;
let variants = ast
.value
.variant_list()
.into_iter()
.flat_map(|it| it.variants())
- .filter(|var| expander.is_cfg_enabled(db, var));
- for var in variants {
+ .zip(item_tree_variants)
+ .filter(|&(_, item_tree_id)| {
+ item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options)
+ });
+ for (var, item_tree_id) in variants {
trace.alloc(
|| var.clone(),
|| EnumVariantData {
name: var.name().map_or_else(Name::missing, |it| it.as_name()),
- variant_data: Arc::new(VariantData::new(db, ast.with_value(var.kind()), module_id)),
+ variant_data: Arc::new(VariantData::new(
+ db,
+ ast.with_value(var.kind()),
+ loc.container,
+ &item_tree,
+ item_tree_id,
+ )),
+ tree_id: item_tree_id,
},
);
}
}
impl VariantData {
- fn new(db: &dyn DefDatabase, flavor: InFile<ast::StructKind>, module_id: ModuleId) -> Self {
- let mut expander = CfgExpander::new(db, flavor.file_id, module_id.krate);
+ fn new(
+ db: &dyn DefDatabase,
+ flavor: InFile<ast::StructKind>,
+ module_id: ModuleId,
+ item_tree: &ItemTree,
+ variant: la_arena::Idx<crate::item_tree::Variant>,
+ ) -> Self {
let mut trace = Trace::new_for_arena();
- match lower_struct(db, &mut expander, &mut trace, &flavor) {
+ match lower_struct(
+ db,
+ &mut trace,
+ &flavor,
+ module_id.krate,
+ item_tree,
+ &item_tree[variant].fields,
+ ) {
StructKind::Tuple => VariantData::Tuple(trace.into_arena()),
StructKind::Record => VariantData::Record(trace.into_arena()),
StructKind::Unit => VariantData::Unit,
@@ -434,28 +464,43 @@ impl HasChildSource<LocalFieldId> for VariantId {
type Value = Either<ast::TupleField, ast::RecordField>;
fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<LocalFieldId, Self::Value>> {
- let (src, module_id) = match self {
+ let item_tree;
+ let (src, fields, container) = match *self {
VariantId::EnumVariantId(it) => {
// I don't really like the fact that we call into parent source
// here, this might add to more queries than necessary.
+ let lookup = it.parent.lookup(db);
+ item_tree = lookup.id.item_tree(db);
let src = it.parent.child_source(db);
- (src.map(|map| map[it.local_id].kind()), it.parent.lookup(db).container)
+ let tree_id = db.enum_data(it.parent).variants[it.local_id].tree_id;
+ let fields = &item_tree[tree_id].fields;
+ (src.map(|map| map[it.local_id].kind()), fields, lookup.container)
}
VariantId::StructId(it) => {
- (it.lookup(db).source(db).map(|it| it.kind()), it.lookup(db).container)
+ let lookup = it.lookup(db);
+ item_tree = lookup.id.item_tree(db);
+ (
+ lookup.source(db).map(|it| it.kind()),
+ &item_tree[lookup.id.value].fields,
+ lookup.container,
+ )
+ }
+ VariantId::UnionId(it) => {
+ let lookup = it.lookup(db);
+ item_tree = lookup.id.item_tree(db);
+ (
+ lookup.source(db).map(|it| {
+ it.record_field_list()
+ .map(ast::StructKind::Record)
+ .unwrap_or(ast::StructKind::Unit)
+ }),
+ &item_tree[lookup.id.value].fields,
+ lookup.container,
+ )
}
- VariantId::UnionId(it) => (
- it.lookup(db).source(db).map(|it| {
- it.record_field_list()
- .map(ast::StructKind::Record)
- .unwrap_or(ast::StructKind::Unit)
- }),
- it.lookup(db).container,
- ),
};
- let mut expander = CfgExpander::new(db, src.file_id, module_id.krate);
let mut trace = Trace::new_for_map();
- lower_struct(db, &mut expander, &mut trace, &src);
+ lower_struct(db, &mut trace, &src, container.krate, &item_tree, fields);
src.with_value(trace.into_map())
}
}
@@ -469,16 +514,19 @@ pub enum StructKind {
fn lower_struct(
db: &dyn DefDatabase,
- expander: &mut CfgExpander,
trace: &mut Trace<FieldData, Either<ast::TupleField, ast::RecordField>>,
ast: &InFile<ast::StructKind>,
+ krate: CrateId,
+ item_tree: &ItemTree,
+ fields: &Fields,
) -> StructKind {
- let ctx = LowerCtx::new(db, &expander.hygiene(), ast.file_id);
+ let ctx = LowerCtx::with_file_id(db, ast.file_id);
- match &ast.value {
- ast::StructKind::Tuple(fl) => {
- for (i, fd) in fl.fields().enumerate() {
- if !expander.is_cfg_enabled(db, &fd) {
+ match (&ast.value, fields) {
+ (ast::StructKind::Tuple(fl), Fields::Tuple(fields)) => {
+ let cfg_options = &db.crate_graph()[krate].cfg_options;
+ for ((i, fd), item_tree_id) in fl.fields().enumerate().zip(fields.clone()) {
+ if !item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options) {
continue;
}
@@ -493,9 +541,10 @@ fn lower_struct(
}
StructKind::Tuple
}
- ast::StructKind::Record(fl) => {
- for fd in fl.fields() {
- if !expander.is_cfg_enabled(db, &fd) {
+ (ast::StructKind::Record(fl), Fields::Record(fields)) => {
+ let cfg_options = &db.crate_graph()[krate].cfg_options;
+ for (fd, item_tree_id) in fl.fields().zip(fields.clone()) {
+ if !item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options) {
continue;
}
@@ -510,7 +559,7 @@ fn lower_struct(
}
StructKind::Record
}
- ast::StructKind::Unit => StructKind::Unit,
+ _ => StructKind::Unit,
}
}
@@ -539,8 +588,8 @@ fn lower_fields(
InFile::new(
current_file_id,
match field.ast_id {
- FieldAstId::Record(it) => it.upcast(),
- FieldAstId::Tuple(it) => it.upcast(),
+ FieldAstId::Record(it) => it.erase(),
+ FieldAstId::Tuple(it) => it.erase(),
},
),
attrs.cfg().unwrap(),
@@ -563,8 +612,8 @@ fn lower_fields(
InFile::new(
current_file_id,
match field.ast_id {
- FieldAstId::Record(it) => it.upcast(),
- FieldAstId::Tuple(it) => it.upcast(),
+ FieldAstId::Record(it) => it.erase(),
+ FieldAstId::Tuple(it) => it.erase(),
},
),
attrs.cfg().unwrap(),
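Note: the enum- and struct-lowering changes above drop CfgExpander and instead walk the AST fields and the item-tree fields in lockstep, keeping only the pairs whose cfg attributes are enabled. A generic sketch of that zip-and-filter pattern (illustrative signature, not the rust-analyzer one):

    // Pair each syntax-level item with its lowered counterpart and keep only
    // the pairs whose cfg predicate is satisfied.
    fn enabled_pairs<A, B>(
        ast_items: impl Iterator<Item = A>,
        tree_items: impl Iterator<Item = B>,
        is_enabled: impl Fn(&B) -> bool,
    ) -> impl Iterator<Item = (A, B)> {
        ast_items.zip(tree_items).filter(move |(_, tree)| is_enabled(tree))
    }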
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
index 04ec47f84..e34a6768f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
@@ -12,27 +12,31 @@ use crate::{
body::{scope::ExprScopes, Body, BodySourceMap},
data::{
adt::{EnumData, StructData},
- ConstData, FunctionData, ImplData, Macro2Data, MacroRulesData, ProcMacroData, StaticData,
- TraitAliasData, TraitData, TypeAliasData,
+ ConstData, ExternCrateDeclData, FunctionData, ImplData, Macro2Data, MacroRulesData,
+ ProcMacroData, StaticData, TraitAliasData, TraitData, TypeAliasData,
},
generics::GenericParams,
import_map::ImportMap,
item_tree::{AttrOwner, ItemTree},
- lang_item::{LangItem, LangItemTarget, LangItems},
+ lang_item::{self, LangItem, LangItemTarget, LangItems},
nameres::{diagnostics::DefDiagnostic, DefMap},
visibility::{self, Visibility},
AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId,
- EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId,
- ImplLoc, InTypeConstId, InTypeConstLoc, LocalEnumVariantId, LocalFieldId, Macro2Id, Macro2Loc,
- MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId,
- StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId,
- UnionLoc, VariantId,
+ EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId,
+ FunctionLoc, GenericDefId, ImplId, ImplLoc, InTypeConstId, InTypeConstLoc, LocalEnumVariantId,
+ LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc,
+ StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc,
+ TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId,
};
#[salsa::query_group(InternDatabaseStorage)]
pub trait InternDatabase: SourceDatabase {
// region: items
#[salsa::interned]
+ fn intern_use(&self, loc: UseLoc) -> UseId;
+ #[salsa::interned]
+ fn intern_extern_crate(&self, loc: ExternCrateLoc) -> ExternCrateId;
+ #[salsa::interned]
fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
#[salsa::interned]
fn intern_struct(&self, loc: StructLoc) -> StructId;
@@ -160,6 +164,9 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
#[salsa::invoke(ProcMacroData::proc_macro_data_query)]
fn proc_macro_data(&self, makro: ProcMacroId) -> Arc<ProcMacroData>;
+ #[salsa::invoke(ExternCrateDeclData::extern_crate_decl_data_query)]
+ fn extern_crate_decl_data(&self, extern_crate: ExternCrateId) -> Arc<ExternCrateDeclData>;
+
// endregion:data
#[salsa::invoke(Body::body_with_source_map_query)]
@@ -197,6 +204,9 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
#[salsa::invoke(AttrsWithOwner::attrs_query)]
fn attrs(&self, def: AttrDefId) -> Attrs;
+ #[salsa::invoke(lang_item::lang_attr_query)]
+ fn lang_attr(&self, def: AttrDefId) -> Option<LangItem>;
+
#[salsa::transparent]
#[salsa::invoke(AttrsWithOwner::attrs_with_owner)]
fn attrs_with_owner(&self, def: AttrDefId) -> AttrsWithOwner;
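The two queries added here intern `use` and `extern crate` items the same way as the other item IDs: a location is exchanged for a small, copyable ID that can be cheaply hashed and later looked up again. A minimal standalone sketch of that interning idea, using a plain HashMap instead of the salsa machinery above (the `Loc` type and field names are illustrative stand-ins, not the real `ItemLoc` types):

use std::collections::HashMap;

// A toy "location"; in the real code this would be an ItemLoc<Use> or ItemLoc<ExternCrate>.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct Loc {
    container: u32,
    index: u32,
}

#[derive(Default)]
struct Interner {
    map: HashMap<Loc, u32>, // location -> id
    values: Vec<Loc>,       // id -> location
}

impl Interner {
    fn intern(&mut self, loc: Loc) -> u32 {
        if let Some(&id) = self.map.get(&loc) {
            return id;
        }
        let id = self.values.len() as u32;
        self.values.push(loc.clone());
        self.map.insert(loc, id);
        id
    }

    fn lookup(&self, id: u32) -> &Loc {
        &self.values[id as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let loc = Loc { container: 0, index: 3 };
    let id = interner.intern(loc.clone());
    assert_eq!(interner.intern(loc.clone()), id); // interning the same location is idempotent
    assert_eq!(interner.lookup(id), &loc);
}

The sketch only captures the idea that equal locations intern to the same ID; caching and invalidation across revisions are salsa's job in the real queries.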
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs
index f30be6b64..d0f2bfab4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs
@@ -8,9 +8,9 @@ use syntax::{ast, AstNode, AstPtr};
use crate::{
dyn_map::{DynMap, Policy},
- ConstId, EnumId, EnumVariantId, FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id,
- MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
- TypeOrConstParamId, UnionId,
+ ConstId, EnumId, EnumVariantId, ExternCrateId, FieldId, FunctionId, ImplId, LifetimeParamId,
+ Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
+ TypeOrConstParamId, UnionId, UseId,
};
pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>;
@@ -25,6 +25,8 @@ pub const TRAIT_ALIAS: Key<ast::TraitAlias, TraitAliasId> = Key::new();
pub const STRUCT: Key<ast::Struct, StructId> = Key::new();
pub const UNION: Key<ast::Union, UnionId> = Key::new();
pub const ENUM: Key<ast::Enum, EnumId> = Key::new();
+pub const EXTERN_CRATE: Key<ast::ExternCrate, ExternCrateId> = Key::new();
+pub const USE: Key<ast::Use, UseId> = Key::new();
pub const VARIANT: Key<ast::Variant, EnumVariantId> = Key::new();
pub const TUPLE_FIELD: Key<ast::TupleField, FieldId> = Key::new();
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
index a588827c8..6db8398bc 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
@@ -15,18 +15,11 @@ use crate::{
MacroId, ModuleId,
};
-/// A subset of Expander that only deals with cfg attributes. We only need it to
-/// avoid cyclic queries in crate def map during enum processing.
#[derive(Debug)]
-pub(crate) struct CfgExpander {
+pub struct Expander {
cfg_options: CfgOptions,
hygiene: Hygiene,
krate: CrateId,
-}
-
-#[derive(Debug)]
-pub struct Expander {
- cfg_expander: CfgExpander,
pub(crate) current_file_id: HirFileId,
pub(crate) module: ModuleId,
/// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
@@ -34,41 +27,23 @@ pub struct Expander {
recursion_limit: Limit,
}
-impl CfgExpander {
- pub(crate) fn new(
- db: &dyn DefDatabase,
- current_file_id: HirFileId,
- krate: CrateId,
- ) -> CfgExpander {
- let hygiene = Hygiene::new(db.upcast(), current_file_id);
- let cfg_options = db.crate_graph()[krate].cfg_options.clone();
- CfgExpander { cfg_options, hygiene, krate }
- }
-
- pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
- Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
- }
-
- pub(crate) fn is_cfg_enabled(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> bool {
- let attrs = self.parse_attrs(db, owner);
- attrs.is_cfg_enabled(&self.cfg_options)
- }
-
- pub(crate) fn hygiene(&self) -> &Hygiene {
- &self.hygiene
- }
-}
-
impl Expander {
pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander {
- let cfg_expander = CfgExpander::new(db, current_file_id, module.krate);
let recursion_limit = db.recursion_limit(module.krate);
#[cfg(not(test))]
let recursion_limit = Limit::new(recursion_limit as usize);
// Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
#[cfg(test)]
let recursion_limit = Limit::new(std::cmp::min(32, recursion_limit as usize));
- Expander { cfg_expander, current_file_id, module, recursion_depth: 0, recursion_limit }
+ Expander {
+ current_file_id,
+ module,
+ recursion_depth: 0,
+ recursion_limit,
+ cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
+ hygiene: Hygiene::new(db.upcast(), current_file_id),
+ krate: module.krate,
+ }
}
pub fn enter_expand<T: ast::AstNode>(
@@ -120,7 +95,7 @@ impl Expander {
}
pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
- self.cfg_expander.hygiene = Hygiene::new(db.upcast(), mark.file_id);
+ self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
self.current_file_id = mark.file_id;
if self.recursion_depth == u32::MAX {
// Recursion limit has been reached somewhere in the macro expansion tree. Reset the
@@ -135,7 +110,7 @@ impl Expander {
}
pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> {
- LowerCtx::new(db, &self.cfg_expander.hygiene, self.current_file_id)
+ LowerCtx::new(db, &self.hygiene, self.current_file_id)
}
pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
@@ -143,11 +118,11 @@ impl Expander {
}
pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
- self.cfg_expander.parse_attrs(db, owner)
+ Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
}
pub(crate) fn cfg_options(&self) -> &CfgOptions {
- &self.cfg_expander.cfg_options
+ &self.cfg_options
}
pub fn current_file_id(&self) -> HirFileId {
@@ -155,7 +130,7 @@ impl Expander {
}
pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
- let ctx = LowerCtx::new(db, &self.cfg_expander.hygiene, self.current_file_id);
+ let ctx = LowerCtx::new(db, &self.hygiene, self.current_file_id);
Path::from_src(path, &ctx)
}
@@ -189,18 +164,26 @@ impl Expander {
return ExpandResult { value: None, err };
};
- Self::enter_expand_inner(db, call_id, err).map(|value| {
- value.and_then(|InFile { file_id, value }| {
- let parse = value.cast::<T>()?;
-
- self.recursion_depth += 1;
- self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id);
- let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
- let mark =
- Mark { file_id: old_file_id, bomb: DropBomb::new("expansion mark dropped") };
- Some((mark, parse))
- })
- })
+ let res = Self::enter_expand_inner(db, call_id, err);
+ match res.err {
+ // If proc-macro is disabled or unresolved, we want to expand to a missing expression
+ // instead of an empty tree which might end up in an empty block.
+ Some(ExpandError::UnresolvedProcMacro(_)) => res.map(|_| None),
+ _ => res.map(|value| {
+ value.and_then(|InFile { file_id, value }| {
+ let parse = value.cast::<T>()?;
+
+ self.recursion_depth += 1;
+ self.hygiene = Hygiene::new(db.upcast(), file_id);
+ let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
+ let mark = Mark {
+ file_id: old_file_id,
+ bomb: DropBomb::new("expansion mark dropped"),
+ };
+ Some((mark, parse))
+ })
+ }),
+ }
}
}
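Besides folding `CfgExpander` into `Expander`, `enter_expand` now special-cases an unresolved proc macro: the error is kept, but the expanded value is dropped so the caller falls back to a missing expression rather than an empty tree. A simplified, self-contained sketch of that value-plus-error pattern (the types below are stand-ins, not the real `hir_expand` ones):

// A simplified stand-in for an expansion result: a value plus an optional error.
struct ExpandResult<T> {
    value: T,
    err: Option<ExpandError>,
}

#[derive(Debug)]
enum ExpandError {
    UnresolvedProcMacro,
    Other(String),
}

impl<T> ExpandResult<T> {
    fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> {
        ExpandResult { value: f(self.value), err: self.err }
    }
}

// Mirrors the new behaviour: keep the error, but drop the value when the proc macro
// could not be resolved, so the caller produces a "missing" node instead of an empty one.
fn collapse_unresolved(res: ExpandResult<Option<String>>) -> ExpandResult<Option<String>> {
    match res.err {
        Some(ExpandError::UnresolvedProcMacro) => res.map(|_| None),
        _ => res,
    }
}

fn main() {
    let ok = ExpandResult { value: Some("expanded".to_string()), err: None };
    let unresolved = ExpandResult {
        value: Some(String::new()),
        err: Some(ExpandError::UnresolvedProcMacro),
    };
    let other = ExpandResult {
        value: Some("partial".to_string()),
        err: Some(ExpandError::Other("macro error".to_string())),
    };
    println!("{:?}", collapse_unresolved(ok).value);         // Some("expanded")
    println!("{:?}", collapse_unresolved(unresolved).value); // None
    println!("{:?}", collapse_unresolved(other).value);      // Some("partial"): value kept, error reported separately
}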
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
index 8c49ae1c4..df2af4c89 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
@@ -360,7 +360,7 @@ fn calculate_best_path(
prefer_no_std,
)?;
cov_mark::hit!(partially_imported);
- path.push_segment(info.path.segments.last()?.clone());
+ path.push_segment(info.name.clone());
Some(path)
})
});
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
index f19c3f028..d7d44e413 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
@@ -67,21 +67,21 @@ pub enum TypeOrConstParamData {
impl TypeOrConstParamData {
pub fn name(&self) -> Option<&Name> {
match self {
- TypeOrConstParamData::TypeParamData(x) => x.name.as_ref(),
- TypeOrConstParamData::ConstParamData(x) => Some(&x.name),
+ TypeOrConstParamData::TypeParamData(it) => it.name.as_ref(),
+ TypeOrConstParamData::ConstParamData(it) => Some(&it.name),
}
}
pub fn has_default(&self) -> bool {
match self {
- TypeOrConstParamData::TypeParamData(x) => x.default.is_some(),
- TypeOrConstParamData::ConstParamData(x) => x.has_default,
+ TypeOrConstParamData::TypeParamData(it) => it.default.is_some(),
+ TypeOrConstParamData::ConstParamData(it) => it.has_default,
}
}
pub fn type_param(&self) -> Option<&TypeParamData> {
match self {
- TypeOrConstParamData::TypeParamData(x) => Some(x),
+ TypeOrConstParamData::TypeParamData(it) => Some(it),
TypeOrConstParamData::ConstParamData(_) => None,
}
}
@@ -89,14 +89,14 @@ impl TypeOrConstParamData {
pub fn const_param(&self) -> Option<&ConstParamData> {
match self {
TypeOrConstParamData::TypeParamData(_) => None,
- TypeOrConstParamData::ConstParamData(x) => Some(x),
+ TypeOrConstParamData::ConstParamData(it) => Some(it),
}
}
pub fn is_trait_self(&self) -> bool {
match self {
- TypeOrConstParamData::TypeParamData(x) => {
- x.provenance == TypeParamProvenance::TraitSelf
+ TypeOrConstParamData::TypeParamData(it) => {
+ it.provenance == TypeParamProvenance::TraitSelf
}
TypeOrConstParamData::ConstParamData(_) => false,
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
index 500e88006..6591c92ac 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
@@ -85,7 +85,7 @@ impl fmt::Display for FloatTypeWrapper {
pub enum Literal {
String(Box<str>),
ByteString(Box<[u8]>),
- CString(Box<str>),
+ CString(Box<[u8]>),
Char(char),
Bool(bool),
Int(i128, Option<BuiltinInt>),
@@ -191,11 +191,6 @@ pub enum Expr {
body: ExprId,
label: Option<LabelId>,
},
- While {
- condition: ExprId,
- body: ExprId,
- label: Option<LabelId>,
- },
Call {
callee: ExprId,
args: Box<[ExprId]>,
@@ -379,10 +374,6 @@ impl Expr {
}
}
Expr::Loop { body, .. } => f(*body),
- Expr::While { condition, body, .. } => {
- f(*condition);
- f(*body);
- }
Expr::Call { callee, args, .. } => {
f(*callee);
args.iter().copied().for_each(f);
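With `Expr::While` removed from the HIR, `while` loops no longer get a dedicated node; a `while` loop is semantically a `loop` with a conditional `break`, which is presumably the shape the lowering now emits. A small standalone illustration of the equivalence:

fn main() {
    let mut n = 0;

    // Surface syntax:
    while n < 3 {
        n += 1;
    }

    // The same control flow written as a plain `loop`, the shape a desugaring
    // can target once there is no dedicated `while` node:
    let mut m = 0;
    loop {
        if !(m < 3) {
            break;
        }
        m += 1;
    }

    assert_eq!(n, m);
    println!("{n} == {m}");
}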
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs
index fa1f4933a..57f023ef3 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs
@@ -425,8 +425,8 @@ impl ConstRef {
}
match expr {
ast::Expr::PathExpr(p) if is_path_ident(&p) => {
- match p.path().and_then(|x| x.segment()).and_then(|x| x.name_ref()) {
- Some(x) => Self::Path(x.as_name()),
+ match p.path().and_then(|it| it.segment()).and_then(|it| it.name_ref()) {
+ Some(it) => Self::Path(it.as_name()),
None => Self::Scalar(LiteralConstRef::Unknown),
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
index 48532655e..4b2e5041a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
@@ -1,13 +1,14 @@
//! A map of all publicly exported items in a crate.
+use std::collections::hash_map::Entry;
use std::{fmt, hash::BuildHasherDefault};
use base_db::CrateId;
use fst::{self, Streamer};
use hir_expand::name::Name;
-use indexmap::{map::Entry, IndexMap};
+use indexmap::IndexMap;
use itertools::Itertools;
-use rustc_hash::{FxHashSet, FxHasher};
+use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
use triomphe::Arc;
use crate::{
@@ -17,52 +18,23 @@ use crate::{
type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
+// FIXME: Support aliases: an item may be exported under multiple names, so `ImportInfo` should
+// have `Vec<(Name, ModuleId)>` instead of `(Name, ModuleId)`.
/// Item import details stored in the `ImportMap`.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ImportInfo {
- /// A path that can be used to import the item, relative to the crate's root.
- pub path: ImportPath,
+ /// A name that can be used to import the item, relative to the crate's root.
+ pub name: Name,
/// The module containing this item.
pub container: ModuleId,
/// Whether the import is a trait associated item or not.
pub is_trait_assoc_item: bool,
}
-#[derive(Debug, Clone, Eq, PartialEq)]
-pub struct ImportPath {
- pub segments: Vec<Name>,
-}
-
-impl ImportPath {
- pub fn display<'a>(&'a self, db: &'a dyn DefDatabase) -> impl fmt::Display + 'a {
- struct Display<'a> {
- db: &'a dyn DefDatabase,
- path: &'a ImportPath,
- }
- impl fmt::Display for Display<'_> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Display::fmt(
- &self.path.segments.iter().map(|it| it.display(self.db.upcast())).format("::"),
- f,
- )
- }
- }
- Display { db, path: self }
- }
-
- fn len(&self) -> usize {
- self.segments.len()
- }
-}
-
-/// A map from publicly exported items to the path needed to import/name them from a downstream
-/// crate.
+/// A map from publicly exported items to the names they are exported under.
///
/// Reexports of items are taken into account, ie. if something is exported under multiple
/// names, the one with the shortest import path will be used.
-///
-/// Note that all paths are relative to the containing crate's root, so the crate name still needs
-/// to be prepended to the `ModPath` before the path is valid.
#[derive(Default)]
pub struct ImportMap {
map: FxIndexMap<ItemInNs, ImportInfo>,
@@ -70,122 +42,58 @@ pub struct ImportMap {
/// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the
/// values returned by running `fst`.
///
- /// Since a path can refer to multiple items due to namespacing, we store all items with the
- /// same path right after each other. This allows us to find all items after the FST gives us
+ /// Since a name can refer to multiple items due to namespacing, we store all items with the
+ /// same name right after each other. This allows us to find all items after the FST gives us
/// the index of the first one.
importables: Vec<ItemInNs>,
fst: fst::Map<Vec<u8>>,
}
impl ImportMap {
- pub fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
+ pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile::span("import_map_query");
- let mut import_map = collect_import_map(db, krate);
+ let map = collect_import_map(db, krate);
- let mut importables = import_map
- .map
+ let mut importables: Vec<_> = map
.iter()
- .map(|(item, info)| (item, fst_path(db, &info.path)))
- .collect::<Vec<_>>();
- importables.sort_by(|(_, fst_path), (_, fst_path2)| fst_path.cmp(fst_path2));
+            // We've only collected items whose name cannot be a tuple field.
+ .map(|(&item, info)| (item, info.name.as_str().unwrap().to_ascii_lowercase()))
+ .collect();
+ importables.sort_by(|(_, lhs_name), (_, rhs_name)| lhs_name.cmp(rhs_name));
// Build the FST, taking care not to insert duplicate values.
-
let mut builder = fst::MapBuilder::memory();
- let mut last_batch_start = 0;
-
- for idx in 0..importables.len() {
- let key = &importables[last_batch_start].1;
- if let Some((_, fst_path)) = importables.get(idx + 1) {
- if key == fst_path {
- continue;
- }
- }
-
- let _ = builder.insert(key, last_batch_start as u64);
-
- last_batch_start = idx + 1;
+ let iter = importables.iter().enumerate().dedup_by(|lhs, rhs| lhs.1 .1 == rhs.1 .1);
+ for (start_idx, (_, name)) in iter {
+ let _ = builder.insert(name, start_idx as u64);
}
- import_map.fst = builder.into_map();
- import_map.importables = importables.iter().map(|&(&item, _)| item).collect();
-
- Arc::new(import_map)
- }
-
- /// Returns the `ModPath` needed to import/mention `item`, relative to this crate's root.
- pub fn path_of(&self, item: ItemInNs) -> Option<&ImportPath> {
- self.import_info_for(item).map(|it| &it.path)
+ Arc::new(ImportMap {
+ map,
+ fst: builder.into_map(),
+ importables: importables.into_iter().map(|(item, _)| item).collect(),
+ })
}
pub fn import_info_for(&self, item: ItemInNs) -> Option<&ImportInfo> {
self.map.get(&item)
}
-
- #[cfg(test)]
- fn fmt_for_test(&self, db: &dyn DefDatabase) -> String {
- let mut importable_paths: Vec<_> = self
- .map
- .iter()
- .map(|(item, info)| {
- let ns = match item {
- ItemInNs::Types(_) => "t",
- ItemInNs::Values(_) => "v",
- ItemInNs::Macros(_) => "m",
- };
- format!("- {} ({ns})", info.path.display(db))
- })
- .collect();
-
- importable_paths.sort();
- importable_paths.join("\n")
- }
-
- fn collect_trait_assoc_items(
- &mut self,
- db: &dyn DefDatabase,
- tr: TraitId,
- is_type_in_ns: bool,
- original_import_info: &ImportInfo,
- ) {
- let _p = profile::span("collect_trait_assoc_items");
- for (assoc_item_name, item) in &db.trait_data(tr).items {
- let module_def_id = match item {
- AssocItemId::FunctionId(f) => ModuleDefId::from(*f),
- AssocItemId::ConstId(c) => ModuleDefId::from(*c),
- // cannot use associated type aliases directly: need a `<Struct as Trait>::TypeAlias`
- // qualifier, ergo no need to store it for imports in import_map
- AssocItemId::TypeAliasId(_) => {
- cov_mark::hit!(type_aliases_ignored);
- continue;
- }
- };
- let assoc_item = if is_type_in_ns {
- ItemInNs::Types(module_def_id)
- } else {
- ItemInNs::Values(module_def_id)
- };
-
- let mut assoc_item_info = original_import_info.clone();
- assoc_item_info.path.segments.push(assoc_item_name.to_owned());
- assoc_item_info.is_trait_assoc_item = true;
- self.map.insert(assoc_item, assoc_item_info);
- }
- }
}
-fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMap {
+fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemInNs, ImportInfo> {
let _p = profile::span("collect_import_map");
let def_map = db.crate_def_map(krate);
- let mut import_map = ImportMap::default();
+ let mut map = FxIndexMap::default();
// We look only into modules that are public(ly reexported), starting with the crate root.
- let empty = ImportPath { segments: vec![] };
let root = def_map.module_id(DefMap::ROOT);
- let mut worklist = vec![(root, empty)];
- while let Some((module, mod_path)) = worklist.pop() {
+ let mut worklist = vec![(root, 0)];
+ // Records items' minimum module depth.
+ let mut depth_map = FxHashMap::default();
+
+ while let Some((module, depth)) = worklist.pop() {
let ext_def_map;
let mod_data = if module.krate == krate {
&def_map[module.local_id]
@@ -201,52 +109,83 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMap {
});
for (name, per_ns) in visible_items {
- let mk_path = || {
- let mut path = mod_path.clone();
- path.segments.push(name.clone());
- path
- };
-
for item in per_ns.iter_items() {
- let path = mk_path();
- let path_len = path.len();
- let import_info =
- ImportInfo { path, container: module, is_trait_assoc_item: false };
-
- if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
- import_map.collect_trait_assoc_items(
- db,
- tr,
- matches!(item, ItemInNs::Types(_)),
- &import_info,
- );
- }
+ let import_info = ImportInfo {
+ name: name.clone(),
+ container: module,
+ is_trait_assoc_item: false,
+ };
- match import_map.map.entry(item) {
+ match depth_map.entry(item) {
Entry::Vacant(entry) => {
- entry.insert(import_info);
+ entry.insert(depth);
}
Entry::Occupied(mut entry) => {
- // If the new path is shorter, prefer that one.
- if path_len < entry.get().path.len() {
- *entry.get_mut() = import_info;
+ if depth < *entry.get() {
+ entry.insert(depth);
} else {
continue;
}
}
}
- // If we've just added a path to a module, descend into it. We might traverse
- // modules multiple times, but only if the new path to it is shorter than the
- // first (else we `continue` above).
+ if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
+ collect_trait_assoc_items(
+ db,
+ &mut map,
+ tr,
+ matches!(item, ItemInNs::Types(_)),
+ &import_info,
+ );
+ }
+
+ map.insert(item, import_info);
+
+ // If we've just added a module, descend into it. We might traverse modules
+ // multiple times, but only if the module depth is smaller (else we `continue`
+ // above).
if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() {
- worklist.push((mod_id, mk_path()));
+ worklist.push((mod_id, depth + 1));
}
}
}
}
- import_map
+ map
+}
+
+fn collect_trait_assoc_items(
+ db: &dyn DefDatabase,
+ map: &mut FxIndexMap<ItemInNs, ImportInfo>,
+ tr: TraitId,
+ is_type_in_ns: bool,
+ trait_import_info: &ImportInfo,
+) {
+ let _p = profile::span("collect_trait_assoc_items");
+ for (assoc_item_name, item) in &db.trait_data(tr).items {
+ let module_def_id = match item {
+ AssocItemId::FunctionId(f) => ModuleDefId::from(*f),
+ AssocItemId::ConstId(c) => ModuleDefId::from(*c),
+ // cannot use associated type aliases directly: need a `<Struct as Trait>::TypeAlias`
+ // qualifier, ergo no need to store it for imports in import_map
+ AssocItemId::TypeAliasId(_) => {
+ cov_mark::hit!(type_aliases_ignored);
+ continue;
+ }
+ };
+ let assoc_item = if is_type_in_ns {
+ ItemInNs::Types(module_def_id)
+ } else {
+ ItemInNs::Values(module_def_id)
+ };
+
+ let assoc_item_info = ImportInfo {
+ container: trait_import_info.container,
+ name: assoc_item_name.clone(),
+ is_trait_assoc_item: true,
+ };
+ map.insert(assoc_item, assoc_item_info);
+ }
}
impl PartialEq for ImportMap {
@@ -260,7 +199,7 @@ impl Eq for ImportMap {}
impl fmt::Debug for ImportMap {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let mut importable_paths: Vec<_> = self
+ let mut importable_names: Vec<_> = self
.map
.iter()
.map(|(item, _)| match item {
@@ -270,56 +209,40 @@ impl fmt::Debug for ImportMap {
})
.collect();
- importable_paths.sort();
- f.write_str(&importable_paths.join("\n"))
+ importable_names.sort();
+ f.write_str(&importable_names.join("\n"))
}
}
-fn fst_path(db: &dyn DefDatabase, path: &ImportPath) -> String {
- let _p = profile::span("fst_path");
- let mut s = path.display(db).to_string();
- s.make_ascii_lowercase();
- s
-}
-
-#[derive(Debug, Eq, PartialEq, Hash)]
-pub enum ImportKind {
- Module,
- Function,
- Adt,
- EnumVariant,
- Const,
- Static,
- Trait,
- TraitAlias,
- TypeAlias,
- BuiltinType,
- AssociatedItem,
- Macro,
-}
-
/// A way to match import map contents against the search query.
#[derive(Debug)]
-pub enum SearchMode {
+enum SearchMode {
/// Import map entry should strictly match the query string.
- Equals,
- /// Import map entry should contain the query string.
- Contains,
+ Exact,
/// Import map entry should contain all letters from the query string,
/// in the same order, but not necessary adjacent.
Fuzzy,
}
+/// Three possible ways to search for the name in associated and/or other items.
+#[derive(Debug, Clone, Copy)]
+pub enum AssocSearchMode {
+ /// Search for the name in both associated and other items.
+ Include,
+ /// Search for the name in other items only.
+ Exclude,
+ /// Search for the name in the associated items only.
+ AssocItemsOnly,
+}
+
#[derive(Debug)]
pub struct Query {
query: String,
lowercased: String,
- name_only: bool,
- assoc_items_only: bool,
search_mode: SearchMode,
+ assoc_mode: AssocSearchMode,
case_sensitive: bool,
limit: usize,
- exclude_import_kinds: FxHashSet<ImportKind>,
}
impl Query {
@@ -328,30 +251,21 @@ impl Query {
Self {
query,
lowercased,
- name_only: false,
- assoc_items_only: false,
- search_mode: SearchMode::Contains,
+ search_mode: SearchMode::Exact,
+ assoc_mode: AssocSearchMode::Include,
case_sensitive: false,
- limit: usize::max_value(),
- exclude_import_kinds: FxHashSet::default(),
+ limit: usize::MAX,
}
}
- /// Matches entries' names only, ignoring the rest of
- /// the qualifier.
- /// Example: for `std::marker::PhantomData`, the name is `PhantomData`.
- pub fn name_only(self) -> Self {
- Self { name_only: true, ..self }
- }
-
- /// Matches only the entries that are associated items, ignoring the rest.
- pub fn assoc_items_only(self) -> Self {
- Self { assoc_items_only: true, ..self }
+ /// Fuzzy finds items instead of exact matching.
+ pub fn fuzzy(self) -> Self {
+ Self { search_mode: SearchMode::Fuzzy, ..self }
}
- /// Specifies the way to search for the entries using the query.
- pub fn search_mode(self, search_mode: SearchMode) -> Self {
- Self { search_mode, ..self }
+ /// Specifies whether we want to include associated items in the result.
+ pub fn assoc_search_mode(self, assoc_mode: AssocSearchMode) -> Self {
+ Self { assoc_mode, ..self }
}
/// Limits the returned number of items to `limit`.
@@ -364,12 +278,6 @@ impl Query {
Self { case_sensitive: true, ..self }
}
- /// Do not include imports of the specified kind in the search results.
- pub fn exclude_import_kind(mut self, import_kind: ImportKind) -> Self {
- self.exclude_import_kinds.insert(import_kind);
- self
- }
-
fn import_matches(
&self,
db: &dyn DefDatabase,
@@ -377,49 +285,36 @@ impl Query {
enforce_lowercase: bool,
) -> bool {
let _p = profile::span("import_map::Query::import_matches");
- if import.is_trait_assoc_item {
- if self.exclude_import_kinds.contains(&ImportKind::AssociatedItem) {
- return false;
- }
- } else if self.assoc_items_only {
- return false;
+ match (import.is_trait_assoc_item, self.assoc_mode) {
+ (true, AssocSearchMode::Exclude) => return false,
+ (false, AssocSearchMode::AssocItemsOnly) => return false,
+ _ => {}
}
- let mut input = if import.is_trait_assoc_item || self.name_only {
- import.path.segments.last().unwrap().display(db.upcast()).to_string()
- } else {
- import.path.display(db).to_string()
- };
- if enforce_lowercase || !self.case_sensitive {
+ let mut input = import.name.display(db.upcast()).to_string();
+ let case_insensitive = enforce_lowercase || !self.case_sensitive;
+ if case_insensitive {
input.make_ascii_lowercase();
}
- let query_string =
- if !enforce_lowercase && self.case_sensitive { &self.query } else { &self.lowercased };
+ let query_string = if case_insensitive { &self.lowercased } else { &self.query };
match self.search_mode {
- SearchMode::Equals => &input == query_string,
- SearchMode::Contains => input.contains(query_string),
+ SearchMode::Exact => &input == query_string,
SearchMode::Fuzzy => {
- let mut unchecked_query_chars = query_string.chars();
- let mut mismatching_query_char = unchecked_query_chars.next();
-
- for input_char in input.chars() {
- match mismatching_query_char {
- None => return true,
- Some(matching_query_char) if matching_query_char == input_char => {
- mismatching_query_char = unchecked_query_chars.next();
- }
- _ => (),
+ let mut input_chars = input.chars();
+ for query_char in query_string.chars() {
+ if input_chars.find(|&it| it == query_char).is_none() {
+ return false;
}
}
- mismatching_query_char.is_none()
+ true
}
}
}
}
-/// Searches dependencies of `krate` for an importable path matching `query`.
+/// Searches dependencies of `krate` for an importable name matching `query`.
///
/// This returns a list of items that could be imported from dependencies of `krate`.
pub fn search_dependencies(
@@ -442,65 +337,44 @@ pub fn search_dependencies(
let mut stream = op.union();
- let mut all_indexed_values = FxHashSet::default();
- while let Some((_, indexed_values)) = stream.next() {
- all_indexed_values.extend(indexed_values.iter().copied());
- }
-
let mut res = FxHashSet::default();
- for indexed_value in all_indexed_values {
- let import_map = &import_maps[indexed_value.index];
- let importables = &import_map.importables[indexed_value.value as usize..];
+ while let Some((_, indexed_values)) = stream.next() {
+ for indexed_value in indexed_values {
+ let import_map = &import_maps[indexed_value.index];
+ let importables = &import_map.importables[indexed_value.value as usize..];
- let common_importable_data = &import_map.map[&importables[0]];
- if !query.import_matches(db, common_importable_data, true) {
- continue;
- }
+ let common_importable_data = &import_map.map[&importables[0]];
+ if !query.import_matches(db, common_importable_data, true) {
+ continue;
+ }
- // Path shared by the importable items in this group.
- let common_importables_path_fst = fst_path(db, &common_importable_data.path);
- // Add the items from this `ModPath` group. Those are all subsequent items in
- // `importables` whose paths match `path`.
- let iter = importables
- .iter()
- .copied()
- .take_while(|item| {
- common_importables_path_fst == fst_path(db, &import_map.map[item].path)
- })
- .filter(|&item| match item_import_kind(item) {
- Some(import_kind) => !query.exclude_import_kinds.contains(&import_kind),
- None => true,
- })
- .filter(|item| {
- !query.case_sensitive // we've already checked the common importables path case-insensitively
+ // Name shared by the importable items in this group.
+ let common_importable_name =
+ common_importable_data.name.to_smol_str().to_ascii_lowercase();
+ // Add the items from this name group. Those are all subsequent items in
+ // `importables` whose name match `common_importable_name`.
+ let iter = importables
+ .iter()
+ .copied()
+ .take_while(|item| {
+ common_importable_name
+ == import_map.map[item].name.to_smol_str().to_ascii_lowercase()
+ })
+ .filter(|item| {
+ !query.case_sensitive // we've already checked the common importables name case-insensitively
|| query.import_matches(db, &import_map.map[item], false)
- });
- res.extend(iter);
+ });
+ res.extend(iter);
- if res.len() >= query.limit {
- return res;
+ if res.len() >= query.limit {
+ return res;
+ }
}
}
res
}
-fn item_import_kind(item: ItemInNs) -> Option<ImportKind> {
- Some(match item.as_module_def_id()? {
- ModuleDefId::ModuleId(_) => ImportKind::Module,
- ModuleDefId::FunctionId(_) => ImportKind::Function,
- ModuleDefId::AdtId(_) => ImportKind::Adt,
- ModuleDefId::EnumVariantId(_) => ImportKind::EnumVariant,
- ModuleDefId::ConstId(_) => ImportKind::Const,
- ModuleDefId::StaticId(_) => ImportKind::Static,
- ModuleDefId::TraitId(_) => ImportKind::Trait,
- ModuleDefId::TraitAliasId(_) => ImportKind::TraitAlias,
- ModuleDefId::TypeAliasId(_) => ImportKind::TypeAlias,
- ModuleDefId::BuiltinType(_) => ImportKind::BuiltinType,
- ModuleDefId::MacroId(_) => ImportKind::Macro,
- })
-}
-
#[cfg(test)]
mod tests {
use base_db::{fixture::WithFixture, SourceDatabase, Upcast};
@@ -510,16 +384,39 @@ mod tests {
use super::*;
+ impl ImportMap {
+ fn fmt_for_test(&self, db: &dyn DefDatabase) -> String {
+ let mut importable_paths: Vec<_> = self
+ .map
+ .iter()
+ .map(|(item, info)| {
+ let path = render_path(db, info);
+ let ns = match item {
+ ItemInNs::Types(_) => "t",
+ ItemInNs::Values(_) => "v",
+ ItemInNs::Macros(_) => "m",
+ };
+ format!("- {path} ({ns})")
+ })
+ .collect();
+
+ importable_paths.sort();
+ importable_paths.join("\n")
+ }
+ }
+
fn check_search(ra_fixture: &str, crate_name: &str, query: Query, expect: Expect) {
let db = TestDB::with_files(ra_fixture);
let crate_graph = db.crate_graph();
let krate = crate_graph
.iter()
- .find(|krate| {
- crate_graph[*krate].display_name.as_ref().map(|n| n.to_string())
- == Some(crate_name.to_string())
+ .find(|&krate| {
+ crate_graph[krate]
+ .display_name
+ .as_ref()
+ .is_some_and(|it| &**it.crate_name() == crate_name)
})
- .unwrap();
+ .expect("could not find crate");
let actual = search_dependencies(db.upcast(), krate, query)
.into_iter()
@@ -530,7 +427,7 @@ mod tests {
let (path, mark) = match assoc_item_path(&db, &dependency_imports, dependency) {
Some(assoc_item_path) => (assoc_item_path, "a"),
None => (
- dependency_imports.path_of(dependency)?.display(&db).to_string(),
+ render_path(&db, dependency_imports.import_info_for(dependency)?),
match dependency {
ItemInNs::Types(ModuleDefId::FunctionId(_))
| ItemInNs::Values(ModuleDefId::FunctionId(_)) => "f",
@@ -560,57 +457,25 @@ mod tests {
dependency_imports: &ImportMap,
dependency: ItemInNs,
) -> Option<String> {
- let dependency_assoc_item_id = match dependency {
- ItemInNs::Types(ModuleDefId::FunctionId(id))
- | ItemInNs::Values(ModuleDefId::FunctionId(id)) => AssocItemId::from(id),
- ItemInNs::Types(ModuleDefId::ConstId(id))
- | ItemInNs::Values(ModuleDefId::ConstId(id)) => AssocItemId::from(id),
- ItemInNs::Types(ModuleDefId::TypeAliasId(id))
- | ItemInNs::Values(ModuleDefId::TypeAliasId(id)) => AssocItemId::from(id),
+ let (dependency_assoc_item_id, container) = match dependency.as_module_def_id()? {
+ ModuleDefId::FunctionId(id) => (AssocItemId::from(id), id.lookup(db).container),
+ ModuleDefId::ConstId(id) => (AssocItemId::from(id), id.lookup(db).container),
+ ModuleDefId::TypeAliasId(id) => (AssocItemId::from(id), id.lookup(db).container),
_ => return None,
};
- let trait_ = assoc_to_trait(db, dependency)?;
- if let ModuleDefId::TraitId(tr) = trait_.as_module_def_id()? {
- let trait_data = db.trait_data(tr);
- let assoc_item_name =
- trait_data.items.iter().find_map(|(assoc_item_name, assoc_item_id)| {
- if &dependency_assoc_item_id == assoc_item_id {
- Some(assoc_item_name)
- } else {
- None
- }
- })?;
- return Some(format!(
- "{}::{}",
- dependency_imports.path_of(trait_)?.display(db),
- assoc_item_name.display(db.upcast())
- ));
- }
- None
- }
-
- fn assoc_to_trait(db: &dyn DefDatabase, item: ItemInNs) -> Option<ItemInNs> {
- let assoc: AssocItemId = match item {
- ItemInNs::Types(it) | ItemInNs::Values(it) => match it {
- ModuleDefId::TypeAliasId(it) => it.into(),
- ModuleDefId::FunctionId(it) => it.into(),
- ModuleDefId::ConstId(it) => it.into(),
- _ => return None,
- },
- _ => return None,
+ let ItemContainerId::TraitId(trait_id) = container else {
+ return None;
};
- let container = match assoc {
- AssocItemId::FunctionId(it) => it.lookup(db).container,
- AssocItemId::ConstId(it) => it.lookup(db).container,
- AssocItemId::TypeAliasId(it) => it.lookup(db).container,
- };
+ let trait_info = dependency_imports.import_info_for(ItemInNs::Types(trait_id.into()))?;
- match container {
- ItemContainerId::TraitId(it) => Some(ItemInNs::Types(it.into())),
- _ => None,
- }
+ let trait_data = db.trait_data(trait_id);
+ let (assoc_item_name, _) = trait_data
+ .items
+ .iter()
+ .find(|(_, assoc_item_id)| &dependency_assoc_item_id == assoc_item_id)?;
+ Some(format!("{}::{}", render_path(db, trait_info), assoc_item_name.display(db.upcast())))
}
fn check(ra_fixture: &str, expect: Expect) {
@@ -633,6 +498,24 @@ mod tests {
expect.assert_eq(&actual)
}
+ fn render_path(db: &dyn DefDatabase, info: &ImportInfo) -> String {
+ let mut module = info.container;
+ let mut segments = vec![&info.name];
+
+ let def_map = module.def_map(db);
+ assert!(def_map.block_id().is_none(), "block local items should not be in `ImportMap`");
+
+ while let Some(parent) = module.containing_module(db) {
+ let parent_data = &def_map[parent.local_id];
+ let (name, _) =
+ parent_data.children.iter().find(|(_, id)| **id == module.local_id).unwrap();
+ segments.push(name);
+ module = parent;
+ }
+
+ segments.iter().rev().map(|it| it.display(db.upcast())).join("::")
+ }
+
#[test]
fn smoke() {
check(
@@ -749,6 +632,7 @@ mod tests {
#[test]
fn module_reexport() {
// Reexporting modules from a dependency adds all contents to the import map.
+ // XXX: The rendered paths are relative to the defining crate.
check(
r"
//- /main.rs crate:main deps:lib
@@ -764,9 +648,9 @@ mod tests {
- module::S (t)
- module::S (v)
main:
+ - module::S (t)
+ - module::S (v)
- reexported_module (t)
- - reexported_module::S (t)
- - reexported_module::S (v)
"#]],
);
}
@@ -868,10 +752,9 @@ mod tests {
check_search(
ra_fixture,
"main",
- Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy),
+ Query::new("fmt".to_string()).fuzzy(),
expect![[r#"
dep::fmt (t)
- dep::fmt::Display (t)
dep::fmt::Display::FMT_CONST (a)
dep::fmt::Display::format_function (a)
dep::fmt::Display::format_method (a)
@@ -898,7 +781,9 @@ mod tests {
check_search(
ra_fixture,
"main",
- Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy).assoc_items_only(),
+ Query::new("fmt".to_string())
+ .fuzzy()
+ .assoc_search_mode(AssocSearchMode::AssocItemsOnly),
expect![[r#"
dep::fmt::Display::FMT_CONST (a)
dep::fmt::Display::format_function (a)
@@ -909,23 +794,10 @@ mod tests {
check_search(
ra_fixture,
"main",
- Query::new("fmt".to_string())
- .search_mode(SearchMode::Fuzzy)
- .exclude_import_kind(ImportKind::AssociatedItem),
+ Query::new("fmt".to_string()).fuzzy().assoc_search_mode(AssocSearchMode::Exclude),
expect![[r#"
- dep::fmt (t)
- dep::fmt::Display (t)
- "#]],
- );
-
- check_search(
- ra_fixture,
- "main",
- Query::new("fmt".to_string())
- .search_mode(SearchMode::Fuzzy)
- .assoc_items_only()
- .exclude_import_kind(ImportKind::AssociatedItem),
- expect![[r#""#]],
+ dep::fmt (t)
+ "#]],
);
}
@@ -958,13 +830,12 @@ mod tests {
check_search(
ra_fixture,
"main",
- Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy),
+ Query::new("fmt".to_string()).fuzzy(),
expect![[r#"
dep::Fmt (m)
dep::Fmt (t)
dep::Fmt (v)
dep::fmt (t)
- dep::fmt::Display (t)
dep::fmt::Display::fmt (a)
dep::format (f)
"#]],
@@ -973,26 +844,12 @@ mod tests {
check_search(
ra_fixture,
"main",
- Query::new("fmt".to_string()).search_mode(SearchMode::Equals),
- expect![[r#"
- dep::Fmt (m)
- dep::Fmt (t)
- dep::Fmt (v)
- dep::fmt (t)
- dep::fmt::Display::fmt (a)
- "#]],
- );
-
- check_search(
- ra_fixture,
- "main",
- Query::new("fmt".to_string()).search_mode(SearchMode::Contains),
+ Query::new("fmt".to_string()),
expect![[r#"
dep::Fmt (m)
dep::Fmt (t)
dep::Fmt (v)
dep::fmt (t)
- dep::fmt::Display (t)
dep::fmt::Display::fmt (a)
"#]],
);
@@ -1033,7 +890,6 @@ mod tests {
dep::Fmt (t)
dep::Fmt (v)
dep::fmt (t)
- dep::fmt::Display (t)
dep::fmt::Display::fmt (a)
"#]],
);
@@ -1041,7 +897,7 @@ mod tests {
check_search(
ra_fixture,
"main",
- Query::new("fmt".to_string()).name_only(),
+ Query::new("fmt".to_string()),
expect![[r#"
dep::Fmt (m)
dep::Fmt (t)
@@ -1106,43 +962,10 @@ mod tests {
pub fn no() {}
"#,
"main",
- Query::new("".to_string()).limit(2),
- expect![[r#"
- dep::Fmt (m)
- dep::Fmt (t)
- dep::Fmt (v)
- dep::fmt (t)
- "#]],
- );
- }
-
- #[test]
- fn search_exclusions() {
- let ra_fixture = r#"
- //- /main.rs crate:main deps:dep
- //- /dep.rs crate:dep
-
- pub struct fmt;
- pub struct FMT;
- "#;
-
- check_search(
- ra_fixture,
- "main",
- Query::new("FMT".to_string()),
+ Query::new("".to_string()).fuzzy().limit(1),
expect![[r#"
- dep::FMT (t)
- dep::FMT (v)
- dep::fmt (t)
- dep::fmt (v)
+ dep::fmt::Display (t)
"#]],
);
-
- check_search(
- ra_fixture,
- "main",
- Query::new("FMT".to_string()).exclude_import_kind(ImportKind::Adt),
- expect![[r#""#]],
- );
}
}
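The import map now indexes plain item names (lowercased for the FST) instead of full paths, and the rewritten `SearchMode::Fuzzy` arm accepts a candidate when the query's characters occur in the candidate name in order, though not necessarily adjacently, by consuming the name's character iterator. The same subsequence check as a standalone function, separate from the `Query` type above:

// Returns true if `query`'s characters occur in `input` in order (not necessarily
// adjacent), mirroring the SearchMode::Fuzzy arm in import_map.rs.
fn fuzzy_matches(input: &str, query: &str) -> bool {
    let mut input_chars = input.chars();
    for query_char in query.chars() {
        // `find` advances the iterator, so each query character must appear
        // strictly after the previous match.
        if input_chars.find(|&it| it == query_char).is_none() {
            return false;
        }
    }
    true
}

fn main() {
    assert!(fuzzy_matches("format_function", "fmt"));
    assert!(fuzzy_matches("display", "dsp"));
    assert!(!fuzzy_matches("display", "dsx"));
    println!("fuzzy matching works");
}

Advancing a single iterator is what enforces the in-order requirement without any backtracking, so the check stays linear in the candidate's length.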
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
index 2001fb29a..873accafb 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
@@ -14,8 +14,9 @@ use stdx::format_to;
use syntax::ast;
use crate::{
- db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ConstId, HasModule,
- ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId,
+ db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ConstId,
+ ExternCrateId, HasModule, ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId,
+ UseId,
};
#[derive(Copy, Clone, Debug)]
@@ -50,6 +51,7 @@ pub struct ItemScope {
unnamed_consts: Vec<ConstId>,
/// Traits imported via `use Trait as _;`.
unnamed_trait_imports: FxHashMap<TraitId, Visibility>,
+ extern_crate_decls: Vec<ExternCrateId>,
/// Macros visible in current module in legacy textual scope
///
/// For macros invoked by an unqualified identifier like `bar!()`, `legacy_macros` will be searched in first.
@@ -112,6 +114,17 @@ impl ItemScope {
self.declarations.iter().copied()
}
+ pub fn extern_crate_decls(
+ &self,
+ ) -> impl Iterator<Item = ExternCrateId> + ExactSizeIterator + '_ {
+ self.extern_crate_decls.iter().copied()
+ }
+
+ pub fn use_decls(&self) -> impl Iterator<Item = UseId> + ExactSizeIterator + '_ {
+ // FIXME: to be implemented
+ std::iter::empty()
+ }
+
pub fn impls(&self) -> impl Iterator<Item = ImplId> + ExactSizeIterator + '_ {
self.impls.iter().copied()
}
@@ -188,7 +201,11 @@ impl ItemScope {
}
pub(crate) fn define_impl(&mut self, imp: ImplId) {
- self.impls.push(imp)
+ self.impls.push(imp);
+ }
+
+ pub(crate) fn define_extern_crate_decl(&mut self, extern_crate: ExternCrateId) {
+ self.extern_crate_decls.push(extern_crate);
}
pub(crate) fn define_unnamed_const(&mut self, konst: ConstId) {
@@ -397,6 +414,7 @@ impl ItemScope {
legacy_macros,
attr_macros,
derive_macros,
+ extern_crate_decls,
} = self;
types.shrink_to_fit();
values.shrink_to_fit();
@@ -409,6 +427,7 @@ impl ItemScope {
legacy_macros.shrink_to_fit();
attr_macros.shrink_to_fit();
derive_macros.shrink_to_fit();
+ extern_crate_decls.shrink_to_fit();
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
index e74b71888..c9b0f75f1 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
@@ -46,7 +46,7 @@ use ast::{AstNode, HasName, StructKind};
use base_db::CrateId;
use either::Either;
use hir_expand::{
- ast_id_map::FileAstId,
+ ast_id_map::{AstIdNode, FileAstId},
attrs::RawAttrs,
hygiene::Hygiene,
name::{name, AsName, Name},
@@ -188,7 +188,7 @@ impl ItemTree {
fn shrink_to_fit(&mut self) {
if let Some(data) = &mut self.data {
let ItemTreeData {
- imports,
+ uses,
extern_crates,
extern_blocks,
functions,
@@ -211,7 +211,7 @@ impl ItemTree {
vis,
} = &mut **data;
- imports.shrink_to_fit();
+ uses.shrink_to_fit();
extern_crates.shrink_to_fit();
extern_blocks.shrink_to_fit();
functions.shrink_to_fit();
@@ -262,7 +262,7 @@ static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module(ModPath::from_kind(P
#[derive(Default, Debug, Eq, PartialEq)]
struct ItemTreeData {
- imports: Arena<Import>,
+ uses: Arena<Use>,
extern_crates: Arena<ExternCrate>,
extern_blocks: Arena<ExternBlock>,
functions: Arena<Function>,
@@ -314,7 +314,7 @@ from_attrs!(ModItem(ModItem), Variant(Idx<Variant>), Field(Idx<Field>), Param(Id
/// Trait implemented by all item nodes in the item tree.
pub trait ItemTreeNode: Clone {
- type Source: AstNode + Into<ast::Item>;
+ type Source: AstIdNode + Into<ast::Item>;
fn ast_id(&self) -> FileAstId<Self::Source>;
@@ -486,7 +486,7 @@ macro_rules! mod_items {
}
mod_items! {
- Import in imports -> ast::Use,
+ Use in uses -> ast::Use,
ExternCrate in extern_crates -> ast::ExternCrate,
ExternBlock in extern_blocks -> ast::ExternBlock,
Function in functions -> ast::Fn,
@@ -541,7 +541,7 @@ impl<N: ItemTreeNode> Index<FileItemTreeId<N>> for ItemTree {
}
#[derive(Debug, Clone, Eq, PartialEq)]
-pub struct Import {
+pub struct Use {
pub visibility: RawVisibilityId,
pub ast_id: FileAstId<ast::Use>,
pub use_tree: UseTree,
@@ -744,7 +744,7 @@ pub struct MacroDef {
pub ast_id: FileAstId<ast::MacroDef>,
}
-impl Import {
+impl Use {
/// Maps a `UseTree` contained in this import back to its AST node.
pub fn use_tree_to_ast(
&self,
@@ -870,7 +870,7 @@ macro_rules! impl_froms {
impl ModItem {
pub fn as_assoc_item(&self) -> Option<AssocItem> {
match self {
- ModItem::Import(_)
+ ModItem::Use(_)
| ModItem::ExternCrate(_)
| ModItem::ExternBlock(_)
| ModItem::Struct(_)
@@ -892,7 +892,7 @@ impl ModItem {
pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> {
match self {
- ModItem::Import(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Use(it) => tree[it.index].ast_id().upcast(),
ModItem::ExternCrate(it) => tree[it.index].ast_id().upcast(),
ModItem::ExternBlock(it) => tree[it.index].ast_id().upcast(),
ModItem::Function(it) => tree[it.index].ast_id().upcast(),
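The `Import` to `Use` rename keeps the item tree's storage scheme: each item kind lives in its own arena, and an ID is a typed index into that arena, so an ID for one kind cannot be used to index another (as in the `tree[it.index]` lookups above). A standalone sketch of that arena-plus-typed-index pattern (hand-rolled here, not the `la_arena` types the real code uses):

use std::marker::PhantomData;

// A typed index: just a u32, but parameterised by the kind of item it points at.
struct Idx<T> {
    raw: u32,
    _marker: PhantomData<fn() -> T>,
}

impl<T> Clone for Idx<T> {
    fn clone(&self) -> Self {
        *self
    }
}
impl<T> Copy for Idx<T> {}

struct Arena<T> {
    data: Vec<T>,
}

impl<T> Arena<T> {
    fn new() -> Self {
        Arena { data: Vec::new() }
    }
    fn alloc(&mut self, value: T) -> Idx<T> {
        let raw = self.data.len() as u32;
        self.data.push(value);
        Idx { raw, _marker: PhantomData }
    }
}

impl<T> std::ops::Index<Idx<T>> for Arena<T> {
    type Output = T;
    fn index(&self, idx: Idx<T>) -> &T {
        &self.data[idx.raw as usize]
    }
}

struct Use {
    path: String,
}

fn main() {
    let mut uses: Arena<Use> = Arena::new();
    let id = uses.alloc(Use { path: "std::fmt".to_string() });
    println!("{}", uses[id].path);
}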
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
index 46633667e..7b898e62d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
@@ -502,13 +502,13 @@ impl<'a> Ctx<'a> {
Some(id(self.data().impls.alloc(res)))
}
- fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Import>> {
+ fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> {
let visibility = self.lower_visibility(use_item);
let ast_id = self.source_ast_id_map.ast_id(use_item);
let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?;
- let res = Import { visibility, ast_id, use_tree };
- Some(id(self.data().imports.alloc(res)))
+ let res = Use { visibility, ast_id, use_tree };
+ Some(id(self.data().uses.alloc(res)))
}
fn lower_extern_crate(
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
index e873316a5..da30830fe 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
@@ -52,7 +52,7 @@ struct Printer<'a> {
needs_indent: bool,
}
-impl<'a> Printer<'a> {
+impl Printer<'_> {
fn indented(&mut self, f: impl FnOnce(&mut Self)) {
self.indent_level += 1;
wln!(self);
@@ -198,8 +198,8 @@ impl<'a> Printer<'a> {
self.print_attrs_of(item);
match item {
- ModItem::Import(it) => {
- let Import { visibility, use_tree, ast_id: _ } = &self.tree[it];
+ ModItem::Use(it) => {
+ let Use { visibility, use_tree, ast_id: _ } = &self.tree[it];
self.print_visibility(*visibility);
w!(self, "use ");
self.print_use_tree(use_tree);
@@ -572,7 +572,7 @@ impl<'a> Printer<'a> {
}
}
-impl<'a> Write for Printer<'a> {
+impl Write for Printer<'_> {
fn write_str(&mut self, s: &str) -> fmt::Result {
for line in s.split_inclusive('\n') {
if self.needs_indent {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
index 0e9ac58fb..627479bb7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
@@ -180,15 +180,15 @@ impl LangItems {
T: Into<AttrDefId> + Copy,
{
let _p = profile::span("collect_lang_item");
- if let Some(lang_item) = lang_attr(db, item) {
+ if let Some(lang_item) = db.lang_attr(item.into()) {
self.items.entry(lang_item).or_insert_with(|| constructor(item));
}
}
}
-pub fn lang_attr(db: &dyn DefDatabase, item: impl Into<AttrDefId> + Copy) -> Option<LangItem> {
- let attrs = db.attrs(item.into());
- attrs.by_key("lang").string_value().cloned().and_then(|it| LangItem::from_str(&it))
+pub(crate) fn lang_attr_query(db: &dyn DefDatabase, item: AttrDefId) -> Option<LangItem> {
+ let attrs = db.attrs(item);
+ attrs.by_key("lang").string_value().and_then(|it| LangItem::from_str(&it))
}
pub enum GenericRequirement {
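Turning `lang_attr` into a salsa query does not change what it computes: read the string value of the item's `#[lang = "..."]` attribute and map it to a known `LangItem`. A toy standalone version of that mapping (the enum variants and the helper below are illustrative only, not the real `LangItem` definition or attribute API):

// Illustrative only: a tiny lookup from the string in `#[lang = "..."]` to a known item.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum LangItem {
    Sized,
    Copy,
    Drop,
}

impl LangItem {
    fn from_str(name: &str) -> Option<LangItem> {
        match name {
            "sized" => Some(LangItem::Sized),
            "copy" => Some(LangItem::Copy),
            "drop" => Some(LangItem::Drop),
            _ => None,
        }
    }
}

// The query body boils down to: find the `lang` attribute's string value, then parse it.
fn lang_attr(attrs: &[(&str, &str)]) -> Option<LangItem> {
    let value = attrs.iter().find(|(key, _)| *key == "lang").map(|(_, value)| *value)?;
    LangItem::from_str(value)
}

fn main() {
    let attrs = [("inline", ""), ("lang", "copy")];
    assert_eq!(lang_attr(&attrs), Some(LangItem::Copy));
    println!("{:?}", lang_attr(&attrs));
}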
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
index 9d8b57a0d..1901db8a0 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
@@ -64,7 +64,7 @@ use std::{
use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
use hir_expand::{
- ast_id_map::FileAstId,
+ ast_id_map::{AstIdNode, FileAstId},
attrs::{Attr, AttrId, AttrInput},
builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander,
@@ -88,8 +88,8 @@ use crate::{
builtin_type::BuiltinType,
data::adt::VariantData,
item_tree::{
- Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, Static,
- Struct, Trait, TraitAlias, TypeAlias, Union,
+ Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules,
+ Static, Struct, Trait, TraitAlias, TypeAlias, Union, Use,
},
};
@@ -121,6 +121,12 @@ impl From<CrateRootModuleId> for ModuleDefId {
}
}
+impl From<CrateId> for CrateRootModuleId {
+ fn from(krate: CrateId) -> Self {
+ CrateRootModuleId { krate }
+ }
+}
+
impl TryFrom<ModuleId> for CrateRootModuleId {
type Error = ();
@@ -145,24 +151,28 @@ pub struct ModuleId {
}
impl ModuleId {
- pub fn def_map(&self, db: &dyn db::DefDatabase) -> Arc<DefMap> {
+ pub fn def_map(self, db: &dyn db::DefDatabase) -> Arc<DefMap> {
match self.block {
Some(block) => db.block_def_map(block),
None => db.crate_def_map(self.krate),
}
}
- pub fn krate(&self) -> CrateId {
+ pub fn krate(self) -> CrateId {
self.krate
}
- pub fn containing_module(&self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
+ pub fn containing_module(self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
self.def_map(db).containing_module(self.local_id)
}
- pub fn containing_block(&self) -> Option<BlockId> {
+ pub fn containing_block(self) -> Option<BlockId> {
self.block
}
+
+ pub fn is_block_module(self) -> bool {
+ self.block.is_some() && self.local_id == DefMap::ROOT
+ }
}
/// An ID of a module, **local** to a `DefMap`.
@@ -314,6 +324,16 @@ type ImplLoc = ItemLoc<Impl>;
impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct UseId(salsa::InternId);
+type UseLoc = ItemLoc<Use>;
+impl_intern!(UseId, UseLoc, intern_use, lookup_intern_use);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct ExternCrateId(salsa::InternId);
+type ExternCrateLoc = ItemLoc<ExternCrate>;
+impl_intern!(ExternCrateId, ExternCrateLoc, intern_extern_crate, lookup_intern_extern_crate);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct ExternBlockId(salsa::InternId);
type ExternBlockLoc = ItemLoc<ExternBlock>;
impl_intern!(ExternBlockId, ExternBlockLoc, intern_extern_block, lookup_intern_extern_block);
@@ -392,14 +412,14 @@ impl TypeParamId {
impl TypeParamId {
/// Caller should check if this toc id really belongs to a type
- pub fn from_unchecked(x: TypeOrConstParamId) -> Self {
- Self(x)
+ pub fn from_unchecked(it: TypeOrConstParamId) -> Self {
+ Self(it)
}
}
impl From<TypeParamId> for TypeOrConstParamId {
- fn from(x: TypeParamId) -> Self {
- x.0
+ fn from(it: TypeParamId) -> Self {
+ it.0
}
}
@@ -418,14 +438,14 @@ impl ConstParamId {
impl ConstParamId {
/// Caller should check if this toc id really belongs to a const
- pub fn from_unchecked(x: TypeOrConstParamId) -> Self {
- Self(x)
+ pub fn from_unchecked(it: TypeOrConstParamId) -> Self {
+ Self(it)
}
}
impl From<ConstParamId> for TypeOrConstParamId {
- fn from(x: ConstParamId) -> Self {
- x.0
+ fn from(it: ConstParamId) -> Self {
+ it.0
}
}
@@ -548,14 +568,14 @@ pub enum TypeOwnerId {
impl TypeOwnerId {
fn as_generic_def_id(self) -> Option<GenericDefId> {
Some(match self {
- TypeOwnerId::FunctionId(x) => GenericDefId::FunctionId(x),
- TypeOwnerId::ConstId(x) => GenericDefId::ConstId(x),
- TypeOwnerId::AdtId(x) => GenericDefId::AdtId(x),
- TypeOwnerId::TraitId(x) => GenericDefId::TraitId(x),
- TypeOwnerId::TraitAliasId(x) => GenericDefId::TraitAliasId(x),
- TypeOwnerId::TypeAliasId(x) => GenericDefId::TypeAliasId(x),
- TypeOwnerId::ImplId(x) => GenericDefId::ImplId(x),
- TypeOwnerId::EnumVariantId(x) => GenericDefId::EnumVariantId(x),
+ TypeOwnerId::FunctionId(it) => GenericDefId::FunctionId(it),
+ TypeOwnerId::ConstId(it) => GenericDefId::ConstId(it),
+ TypeOwnerId::AdtId(it) => GenericDefId::AdtId(it),
+ TypeOwnerId::TraitId(it) => GenericDefId::TraitId(it),
+ TypeOwnerId::TraitAliasId(it) => GenericDefId::TraitAliasId(it),
+ TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
+ TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it),
+ TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it),
TypeOwnerId::InTypeConstId(_) | TypeOwnerId::ModuleId(_) | TypeOwnerId::StaticId(_) => {
return None
}
@@ -578,15 +598,15 @@ impl_from!(
for TypeOwnerId
);
-// Every `DefWithBodyId` is a type owner, since bodies can contain type (e.g. `{ let x: Type = _; }`)
+// Every `DefWithBodyId` is a type owner, since bodies can contain type (e.g. `{ let it: Type = _; }`)
impl From<DefWithBodyId> for TypeOwnerId {
fn from(value: DefWithBodyId) -> Self {
match value {
- DefWithBodyId::FunctionId(x) => x.into(),
- DefWithBodyId::StaticId(x) => x.into(),
- DefWithBodyId::ConstId(x) => x.into(),
- DefWithBodyId::InTypeConstId(x) => x.into(),
- DefWithBodyId::VariantId(x) => x.into(),
+ DefWithBodyId::FunctionId(it) => it.into(),
+ DefWithBodyId::StaticId(it) => it.into(),
+ DefWithBodyId::ConstId(it) => it.into(),
+ DefWithBodyId::InTypeConstId(it) => it.into(),
+ DefWithBodyId::VariantId(it) => it.into(),
}
}
}
@@ -594,14 +614,14 @@ impl From<DefWithBodyId> for TypeOwnerId {
impl From<GenericDefId> for TypeOwnerId {
fn from(value: GenericDefId) -> Self {
match value {
- GenericDefId::FunctionId(x) => x.into(),
- GenericDefId::AdtId(x) => x.into(),
- GenericDefId::TraitId(x) => x.into(),
- GenericDefId::TraitAliasId(x) => x.into(),
- GenericDefId::TypeAliasId(x) => x.into(),
- GenericDefId::ImplId(x) => x.into(),
- GenericDefId::EnumVariantId(x) => x.into(),
- GenericDefId::ConstId(x) => x.into(),
+ GenericDefId::FunctionId(it) => it.into(),
+ GenericDefId::AdtId(it) => it.into(),
+ GenericDefId::TraitId(it) => it.into(),
+ GenericDefId::TraitAliasId(it) => it.into(),
+ GenericDefId::TypeAliasId(it) => it.into(),
+ GenericDefId::ImplId(it) => it.into(),
+ GenericDefId::EnumVariantId(it) => it.into(),
+ GenericDefId::ConstId(it) => it.into(),
}
}
}
@@ -716,7 +736,7 @@ impl GeneralConstId {
.const_data(const_id)
.name
.as_ref()
- .and_then(|x| x.as_str())
+ .and_then(|it| it.as_str())
.unwrap_or("_")
.to_owned(),
GeneralConstId::ConstBlockId(id) => format!("{{anonymous const {id:?}}}"),
@@ -821,6 +841,8 @@ pub enum AttrDefId {
ImplId(ImplId),
GenericParamId(GenericParamId),
ExternBlockId(ExternBlockId),
+ ExternCrateId(ExternCrateId),
+ UseId(UseId),
}
impl_from!(
@@ -835,7 +857,8 @@ impl_from!(
TypeAliasId,
MacroId(Macro2Id, MacroRulesId, ProcMacroId),
ImplId,
- GenericParamId
+ GenericParamId,
+ ExternCrateId
for AttrDefId
);
@@ -927,6 +950,12 @@ impl HasModule for AdtId {
}
}
+impl HasModule for ExternCrateId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ self.lookup(db).container
+ }
+}
+
impl HasModule for VariantId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
match self {
@@ -950,17 +979,17 @@ impl HasModule for MacroId {
impl HasModule for TypeOwnerId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
match self {
- TypeOwnerId::FunctionId(x) => x.lookup(db).module(db),
- TypeOwnerId::StaticId(x) => x.lookup(db).module(db),
- TypeOwnerId::ConstId(x) => x.lookup(db).module(db),
- TypeOwnerId::InTypeConstId(x) => x.lookup(db).owner.module(db),
- TypeOwnerId::AdtId(x) => x.module(db),
- TypeOwnerId::TraitId(x) => x.lookup(db).container,
- TypeOwnerId::TraitAliasId(x) => x.lookup(db).container,
- TypeOwnerId::TypeAliasId(x) => x.lookup(db).module(db),
- TypeOwnerId::ImplId(x) => x.lookup(db).container,
- TypeOwnerId::EnumVariantId(x) => x.parent.lookup(db).container,
- TypeOwnerId::ModuleId(x) => *x,
+ TypeOwnerId::FunctionId(it) => it.lookup(db).module(db),
+ TypeOwnerId::StaticId(it) => it.lookup(db).module(db),
+ TypeOwnerId::ConstId(it) => it.lookup(db).module(db),
+ TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.module(db),
+ TypeOwnerId::AdtId(it) => it.module(db),
+ TypeOwnerId::TraitId(it) => it.lookup(db).container,
+ TypeOwnerId::TraitAliasId(it) => it.lookup(db).container,
+ TypeOwnerId::TypeAliasId(it) => it.lookup(db).module(db),
+ TypeOwnerId::ImplId(it) => it.lookup(db).container,
+ TypeOwnerId::EnumVariantId(it) => it.parent.lookup(db).container,
+ TypeOwnerId::ModuleId(it) => *it,
}
}
}
@@ -1050,6 +1079,8 @@ impl AttrDefId {
.krate
}
AttrDefId::MacroId(it) => it.module(db).krate,
+ AttrDefId::ExternCrateId(it) => it.lookup(db).container.krate,
+ AttrDefId::UseId(it) => it.lookup(db).container.krate,
}
}
}
@@ -1060,7 +1091,7 @@ pub trait AsMacroCall {
&self,
db: &dyn ExpandDatabase,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Option<MacroCallId> {
self.as_call_id_with_errors(db, krate, resolver).ok()?.value
}
@@ -1069,7 +1100,7 @@ pub trait AsMacroCall {
&self,
db: &dyn ExpandDatabase,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro>;
}
@@ -1078,7 +1109,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
&self,
db: &dyn ExpandDatabase,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
@@ -1089,24 +1120,25 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
};
- macro_call_as_call_id_(
+ macro_call_as_call_id_with_eager(
db,
&AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
expands_to,
krate,
resolver,
+ resolver,
)
}
}
/// Helper wrapper for `AstId` with `ModPath`
#[derive(Clone, Debug, Eq, PartialEq)]
-struct AstIdWithPath<T: ast::AstNode> {
+struct AstIdWithPath<T: AstIdNode> {
ast_id: AstId<T>,
path: path::ModPath,
}
-impl<T: ast::AstNode> AstIdWithPath<T> {
+impl<T: AstIdNode> AstIdWithPath<T> {
fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> {
AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
}
@@ -1117,33 +1149,39 @@ fn macro_call_as_call_id(
call: &AstIdWithPath<ast::MacroCall>,
expand_to: ExpandTo,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<Option<MacroCallId>, UnresolvedMacro> {
- macro_call_as_call_id_(db, call, expand_to, krate, resolver).map(|res| res.value)
+ macro_call_as_call_id_with_eager(db, call, expand_to, krate, resolver, resolver)
+ .map(|res| res.value)
}
-fn macro_call_as_call_id_(
+fn macro_call_as_call_id_with_eager(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
expand_to: ExpandTo,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
+ eager_resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
let def =
resolver(call.path.clone()).ok_or_else(|| UnresolvedMacro { path: call.path.clone() })?;
- let res = if let MacroDefKind::BuiltInEager(..) = def.kind {
- let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
- expand_eager_macro_input(db, krate, macro_call, def, &resolver)?
- } else {
- ExpandResult {
+ let res = match def.kind {
+ MacroDefKind::BuiltInEager(..) => {
+ let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
+ expand_eager_macro_input(db, krate, macro_call, def, &|path| {
+ eager_resolver(path).filter(MacroDefId::is_fn_like)
+ })
+ }
+ _ if def.is_fn_like() => ExpandResult {
value: Some(def.as_lazy_macro(
db,
krate,
MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
)),
err: None,
- }
+ },
+ _ => return Err(UnresolvedMacro { path: call.path.clone() }),
};
Ok(res)
}
@@ -1228,6 +1266,7 @@ fn derive_macro_as_call_id(
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
let (macro_id, def_id) = resolver(item_attr.path.clone())
+ .filter(|(_, def_id)| def_id.is_derive())
.ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
let call_id = def_id.as_lazy_macro(
db.upcast(),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
index af623fd0e..e523c2291 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
@@ -1,5 +1,9 @@
//! Context for lowering paths.
-use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile};
+use hir_expand::{
+ ast_id_map::{AstIdMap, AstIdNode},
+ hygiene::Hygiene,
+ AstId, HirFileId, InFile,
+};
use once_cell::unsync::OnceCell;
use syntax::ast;
use triomphe::Arc;
@@ -37,7 +41,7 @@ impl<'a> LowerCtx<'a> {
Path::from_src(ast, self)
}
- pub(crate) fn ast_id<N: syntax::AstNode>(&self, item: &N) -> Option<AstId<N>> {
+ pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> Option<AstId<N>> {
let &(file_id, ref ast_id_map) = self.ast_id_map.as_ref()?;
let ast_id_map = ast_id_map.get_or_init(|| self.db.ast_id_map(file_id));
Some(InFile::new(file_id, ast_id_map.ast_id(item)))
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
index f41f97190..abd84c6a4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
@@ -279,6 +279,44 @@ impl < > core::cmp::Eq for Command< > where {}"#]],
}
#[test]
+fn test_partial_eq_expand_with_derive_const() {
+ // FIXME: actually expand with const
+ check(
+ r#"
+//- minicore: derive, eq
+#[derive_const(PartialEq, Eq)]
+enum Command {
+ Move { x: i32, y: i32 },
+ Do(&'static str),
+ Jump,
+}
+"#,
+ expect![[r#"
+#[derive_const(PartialEq, Eq)]
+enum Command {
+ Move { x: i32, y: i32 },
+ Do(&'static str),
+ Jump,
+}
+
+impl < > core::cmp::PartialEq for Command< > where {
+ fn eq(&self , other: &Self ) -> bool {
+ match (self , other) {
+ (Command::Move {
+ x: x_self, y: y_self,
+ }
+ , Command::Move {
+ x: x_other, y: y_other,
+ }
+ )=>x_self.eq(x_other) && y_self.eq(y_other), (Command::Do(f0_self, ), Command::Do(f0_other, ))=>f0_self.eq(f0_other), (Command::Jump, Command::Jump)=>true , _unused=>false
+ }
+ }
+}
+impl < > core::cmp::Eq for Command< > where {}"#]],
+ );
+}
+
+#[test]
fn test_partial_ord_expand() {
check(
r#"
@@ -379,6 +417,44 @@ fn test_hash_expand() {
use core::hash::Hash;
#[derive(Hash)]
+struct Foo {
+ x: i32,
+ y: u64,
+ z: (i32, u64),
+}
+"#,
+ expect![[r#"
+use core::hash::Hash;
+
+#[derive(Hash)]
+struct Foo {
+ x: i32,
+ y: u64,
+ z: (i32, u64),
+}
+
+impl < > core::hash::Hash for Foo< > where {
+ fn hash<H: core::hash::Hasher>(&self , ra_expand_state: &mut H) {
+ match self {
+ Foo {
+ x: x, y: y, z: z,
+ }
+ => {
+ x.hash(ra_expand_state);
+ y.hash(ra_expand_state);
+ z.hash(ra_expand_state);
+ }
+ ,
+ }
+ }
+}"#]],
+ );
+ check(
+ r#"
+//- minicore: derive, hash
+use core::hash::Hash;
+
+#[derive(Hash)]
enum Command {
Move { x: i32, y: i32 },
Do(&'static str),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
index 07d9baa58..1250cbb74 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
@@ -201,7 +201,7 @@ macro_rules! format_args {
}
fn main() {
- ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::Argument::new(&(arg1(a, b, c)), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(arg2), ::core::fmt::Debug::fmt), ]);
+ ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(arg1(a, b, c)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(arg2), ::core::fmt::Debug::fmt), ]);
}
"##]],
);
@@ -235,11 +235,11 @@ macro_rules! format_args {
fn main() {
/* error: no rule matches input tokens */;
- /* error: no rule matches input tokens */;
- /* error: no rule matches input tokens */;
- /* error: no rule matches input tokens */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::Argument::new(&(), ::core::fmt::Display::fmt), ]);
- /* error: no rule matches input tokens */;
- ::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::Argument::new(&(5), ::core::fmt::Display::fmt), ]);
+ /* error: expected expression */;
+ /* error: expected expression, expected COMMA */;
+ /* error: expected expression */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(), ::core::fmt::Display::fmt), ]);
+ /* error: expected expression, expected expression */;
+ ::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(5), ::core::fmt::Display::fmt), ]);
}
"##]],
);
@@ -267,7 +267,7 @@ macro_rules! format_args {
}
fn main() {
- ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::Argument::new(&(a::<A, B>()), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(b), ::core::fmt::Debug::fmt), ]);
+ ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(a::<A, B>()), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(b), ::core::fmt::Debug::fmt), ]);
}
"##]],
);
@@ -300,7 +300,7 @@ macro_rules! format_args {
}
fn main() {
- ::core::fmt::Arguments::new_v1(&[r#""#, r#",mismatch,""#, r#"",""#, r#"""#, ], &[::core::fmt::Argument::new(&(location_csv_pat(db, &analysis, vfs, &sm, pat_id)), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(mismatch.expected.display(db)), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(mismatch.actual.display(db)), ::core::fmt::Display::fmt), ]);
+ ::core::fmt::Arguments::new_v1(&[r#""#, r#",mismatch,""#, r#"",""#, r#"""#, ], &[::core::fmt::ArgumentV1::new(&(location_csv_pat(db, &analysis, vfs, &sm, pat_id)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(mismatch.expected.display(db)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(mismatch.actual.display(db)), ::core::fmt::Display::fmt), ]);
}
"##]],
);
@@ -334,7 +334,7 @@ macro_rules! format_args {
}
fn main() {
- ::core::fmt::Arguments::new_v1(&["xxx", "y", "zzz", ], &[::core::fmt::Argument::new(&(2), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(b), ::core::fmt::Debug::fmt), ]);
+ ::core::fmt::Arguments::new_v1(&["xxx", "y", "zzz", ], &[::core::fmt::ArgumentV1::new(&(2), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(b), ::core::fmt::Debug::fmt), ]);
}
"##]],
);
@@ -364,8 +364,8 @@ macro_rules! format_args {
fn main() {
let _ =
- /* error: no rule matches input tokens *//* parse error: expected field name or number */
-::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::Argument::new(&(a.), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(), ::core::fmt::Debug::fmt), ]);
+ /* error: expected field name or number *//* parse error: expected field name or number */
+::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(a.), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(), ::core::fmt::Debug::fmt), ]);
}
"##]],
);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
index 553ffe3d0..2170cadcf 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -98,6 +98,66 @@ fn#19 main#20(#21)#21 {#22
"##]],
);
}
+
+#[test]
+fn eager_expands_with_unresolved_within() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+
+fn main(foo: ()) {
+ format_args!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+}
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+
+fn main(foo: ()) {
+ /* error: unresolved macro identity */::core::fmt::Arguments::new_v1(&["", " ", " ", ], &[::core::fmt::ArgumentV1::new(&(::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(0), ::core::fmt::Display::fmt), ])), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(foo), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(identity!(10)), ::core::fmt::Display::fmt), ])
+}
+"##]],
+ );
+}
+
+#[test]
+fn token_mapping_eager() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+
+macro_rules! identity {
+ ($expr:expr) => { $expr };
+}
+
+fn main(foo: ()) {
+ format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+}
+
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+
+macro_rules! identity {
+ ($expr:expr) => { $expr };
+}
+
+fn main(foo: ()) {
+ // format_args/*+tokenids*/!("{} {} {}"#1,#3 format_args!("{}", 0#10),#12 foo#13,#14 identity!(10#18),#21 "bar"#22)
+::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(&#4294967295[#4294967295""#4294967295,#4294967295 " "#4294967295,#4294967295 " "#4294967295,#4294967295 ]#4294967295,#4294967295 &#4294967295[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(&#4294967295[#4294967295""#4294967295,#4294967295 ]#4294967295,#4294967295 &#4294967295[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#42949672950#10)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#4294967295foo#13)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#429496729510#18)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295
+}
+
+"##]],
+ );
+}
+
#[test]
fn float_field_access_macro_input() {
check(
@@ -813,6 +873,37 @@ fn foo() {
}
#[test]
+fn test_type_path_is_transcribed_as_expr_path() {
+ check(
+ r#"
+macro_rules! m {
+ ($p:path) => { let $p; }
+}
+fn test() {
+ m!(S)
+ m!(S<i32>)
+ m!(S<S<i32>>)
+ m!(S<{ module::CONST < 42 }>)
+}
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($p:path) => { let $p; }
+}
+fn test() {
+ let S;
+ let S:: <i32> ;
+ let S:: <S:: <i32>> ;
+ let S:: < {
+ module::CONST<42
+ }
+ > ;
+}
+"#]],
+ );
+}
+
+#[test]
fn test_expr() {
check(
r#"
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
index 4a62696df..7a87e61c6 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -20,8 +20,8 @@ use ::mbe::TokenMap;
use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
use expect_test::Expect;
use hir_expand::{
- db::{ExpandDatabase, TokenExpander},
- AstId, InFile, MacroDefId, MacroDefKind, MacroFile,
+ db::{DeclarativeMacroExpander, ExpandDatabase},
+ AstId, InFile, MacroFile,
};
use stdx::format_to;
use syntax::{
@@ -100,32 +100,29 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
let call_offset = macro_.syntax().text_range().start().into();
let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
- let kind = MacroDefKind::Declarative(ast_id);
- let macro_def = db
- .macro_def(MacroDefId { krate, kind, local_inner: false, allow_internal_unsafe: false })
- .unwrap();
- if let TokenExpander::DeclarativeMacro { mac, def_site_token_map } = &*macro_def {
- let tt = match &macro_ {
- ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
- ast::Macro::MacroDef(_) => unimplemented!(""),
- };
+ let DeclarativeMacroExpander { mac, def_site_token_map } =
+ &*db.decl_macro_expander(krate, ast_id);
+ assert_eq!(mac.err(), None);
+ let tt = match &macro_ {
+ ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
+ ast::Macro::MacroDef(_) => unimplemented!(""),
+ };
- let tt_start = tt.syntax().text_range().start();
- tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
- |token| {
- let range = token.text_range().checked_sub(tt_start).unwrap();
- if let Some(id) = def_site_token_map.token_by_range(range) {
- let offset = (range.end() + tt_start).into();
- text_edits.push((offset..offset, format!("#{}", id.0)));
- }
- },
- );
- text_edits.push((
- call_offset..call_offset,
- format!("// call ids will be shifted by {:?}\n", mac.shift()),
- ));
- }
+ let tt_start = tt.syntax().text_range().start();
+ tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
+ |token| {
+ let range = token.text_range().checked_sub(tt_start).unwrap();
+ if let Some(id) = def_site_token_map.token_by_range(range) {
+ let offset = (range.end() + tt_start).into();
+ text_edits.push((offset..offset, format!("#{}", id.0)));
+ }
+ },
+ );
+ text_edits.push((
+ call_offset..call_offset,
+ format!("// call ids will be shifted by {:?}\n", mac.shift()),
+ ));
}
for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
@@ -190,7 +187,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
let range: Range<usize> = range.into();
if show_token_ids {
- if let Some((tree, map, _)) = arg.as_deref() {
+ if let Some((tree, map, _)) = arg.value.as_deref() {
let tt_range = call.token_tree().unwrap().syntax().text_range();
let mut ranges = Vec::new();
extract_id_ranges(&mut ranges, map, tree);
@@ -239,7 +236,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db);
- if src.file_id.is_builtin_derive(&db).is_some() {
+ if src.file_id.is_builtin_derive(&db) {
let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
format_to!(expanded_text, "\n{}", pp)
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
index 0ab1bd849..86818ce26 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
@@ -60,7 +60,7 @@ mod tests;
use std::{cmp::Ord, ops::Deref};
use base_db::{CrateId, Edition, FileId, ProcMacroKind};
-use hir_expand::{name::Name, InFile, MacroCallId, MacroDefId};
+use hir_expand::{name::Name, HirFileId, InFile, MacroCallId, MacroDefId};
use itertools::Itertools;
use la_arena::Arena;
use profile::Count;
@@ -196,6 +196,10 @@ impl BlockRelativeModuleId {
fn into_module(self, krate: CrateId) -> ModuleId {
ModuleId { krate, block: self.block, local_id: self.local_id }
}
+
+ fn is_block_module(self) -> bool {
+ self.block.is_some() && self.local_id == DefMap::ROOT
+ }
}
impl std::ops::Index<LocalModuleId> for DefMap {
@@ -278,7 +282,9 @@ pub struct ModuleData {
pub origin: ModuleOrigin,
/// Declared visibility of this module.
pub visibility: Visibility,
- /// Always [`None`] for block modules
+ /// Parent module in the same `DefMap`.
+ ///
+ /// [`None`] for block modules because a block module is always the root of its `DefMap`.
pub parent: Option<LocalModuleId>,
pub children: FxHashMap<Name, LocalModuleId>,
pub scope: ItemScope,
@@ -626,6 +632,17 @@ impl ModuleData {
self.origin.definition_source(db)
}
+ /// Same as [`definition_source`] but only returns the file id to prevent parsing the AST.
+ pub fn definition_source_file_id(&self) -> HirFileId {
+ match self.origin {
+ ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
+ definition.into()
+ }
+ ModuleOrigin::Inline { definition, .. } => definition.file_id,
+ ModuleOrigin::BlockExpr { block } => block.file_id,
+ }
+ }
+
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
/// `None` for the crate root or block.
pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option<InFile<ast::Module>> {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
index 62fb3c788..eef54fc49 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
@@ -38,7 +38,7 @@ use crate::{
self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode,
MacroCall, MacroDef, MacroRules, Mod, ModItem, ModKind, TreeId,
},
- macro_call_as_call_id, macro_id_to_def_id,
+ macro_call_as_call_id, macro_call_as_call_id_with_eager, macro_id_to_def_id,
nameres::{
diagnostics::DefDiagnostic,
mod_resolution::ModDir,
@@ -52,10 +52,10 @@ use crate::{
tt,
visibility::{RawVisibility, Visibility},
AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantId,
- ExternBlockLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId,
- Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, ModuleDefId,
- ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, TraitLoc,
- TypeAliasLoc, UnionLoc, UnresolvedMacro,
+ ExternBlockLoc, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId,
+ LocalModuleId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, MacroRulesLoc,
+ ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc,
+ TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseLoc,
};
static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
@@ -146,7 +146,7 @@ impl PartialResolvedImport {
#[derive(Clone, Debug, Eq, PartialEq)]
enum ImportSource {
- Import { id: ItemTreeId<item_tree::Import>, use_tree: Idx<ast::UseTree> },
+ Use { id: ItemTreeId<item_tree::Use>, use_tree: Idx<ast::UseTree> },
ExternCrate(ItemTreeId<item_tree::ExternCrate>),
}
@@ -156,10 +156,9 @@ struct Import {
alias: Option<ImportAlias>,
visibility: RawVisibility,
kind: ImportKind,
+ source: ImportSource,
is_prelude: bool,
- is_extern_crate: bool,
is_macro_use: bool,
- source: ImportSource,
}
impl Import {
@@ -167,27 +166,24 @@ impl Import {
db: &dyn DefDatabase,
krate: CrateId,
tree: &ItemTree,
- id: ItemTreeId<item_tree::Import>,
- ) -> Vec<Self> {
+ id: ItemTreeId<item_tree::Use>,
+ mut cb: impl FnMut(Self),
+ ) {
let it = &tree[id.value];
let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into());
let visibility = &tree[it.visibility];
let is_prelude = attrs.by_key("prelude_import").exists();
-
- let mut res = Vec::new();
it.use_tree.expand(|idx, path, kind, alias| {
- res.push(Self {
+ cb(Self {
path,
alias,
visibility: visibility.clone(),
kind,
is_prelude,
- is_extern_crate: false,
is_macro_use: false,
- source: ImportSource::Import { id, use_tree: idx },
+ source: ImportSource::Use { id, use_tree: idx },
});
});
- res
}
fn from_extern_crate(
@@ -205,7 +201,6 @@ impl Import {
visibility: visibility.clone(),
kind: ImportKind::Plain,
is_prelude: false,
- is_extern_crate: true,
is_macro_use: attrs.by_key("macro_use").exists(),
source: ImportSource::ExternCrate(id),
}
@@ -776,7 +771,7 @@ impl DefCollector<'_> {
let _p = profile::span("resolve_import")
.detail(|| format!("{}", import.path.display(self.db.upcast())));
tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
- if import.is_extern_crate {
+ if matches!(import.source, ImportSource::ExternCrate { .. }) {
let name = import
.path
.as_ident()
@@ -813,11 +808,8 @@ impl DefCollector<'_> {
}
}
- // Check whether all namespace is resolved
- if def.take_types().is_some()
- && def.take_values().is_some()
- && def.take_macros().is_some()
- {
+ // Check whether all namespaces are resolved.
+ if def.is_full() {
PartialResolvedImport::Resolved(def)
} else {
PartialResolvedImport::Indeterminate(def)
@@ -826,7 +818,7 @@ impl DefCollector<'_> {
}
fn resolve_extern_crate(&self, name: &Name) -> Option<CrateRootModuleId> {
- if *name == name!(self) {
+ if *name == name![self] {
cov_mark::hit!(extern_crate_self_as);
Some(self.def_map.crate_root())
} else {
@@ -867,7 +859,7 @@ impl DefCollector<'_> {
tracing::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
// extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
- if import.is_extern_crate
+ if matches!(import.source, ImportSource::ExternCrate { .. })
&& self.def_map.block.is_none()
&& module_id == DefMap::ROOT
{
@@ -1482,7 +1474,7 @@ impl DefCollector<'_> {
}
for directive in &self.unresolved_imports {
- if let ImportSource::Import { id: import, use_tree } = directive.import.source {
+ if let ImportSource::Use { id: import, use_tree } = directive.import.source {
if matches!(
(directive.import.path.segments().first(), &directive.import.path.kind),
(Some(krate), PathKind::Plain | PathKind::Abs) if diagnosed_extern_crates.contains(krate)
@@ -1584,22 +1576,33 @@ impl ModCollector<'_, '_> {
match item {
ModItem::Mod(m) => self.collect_module(m, &attrs),
- ModItem::Import(import_id) => {
- let imports = Import::from_use(
+ ModItem::Use(import_id) => {
+ let _import_id =
+ UseLoc { container: module, id: ItemTreeId::new(self.tree_id, import_id) }
+ .intern(db);
+ Import::from_use(
db,
krate,
self.item_tree,
ItemTreeId::new(self.tree_id, import_id),
- );
- self.def_collector.unresolved_imports.extend(imports.into_iter().map(
- |import| ImportDirective {
- module_id: self.module_id,
- import,
- status: PartialResolvedImport::Unresolved,
+ |import| {
+ self.def_collector.unresolved_imports.push(ImportDirective {
+ module_id: self.module_id,
+ import,
+ status: PartialResolvedImport::Unresolved,
+ });
},
- ));
+ )
}
ModItem::ExternCrate(import_id) => {
+ let extern_crate_id = ExternCrateLoc {
+ container: module,
+ id: ItemTreeId::new(self.tree_id, import_id),
+ }
+ .intern(db);
+ self.def_collector.def_map.modules[self.module_id]
+ .scope
+ .define_extern_crate_decl(extern_crate_id);
self.def_collector.unresolved_imports.push(ImportDirective {
module_id: self.module_id,
import: Import::from_extern_crate(
@@ -2182,7 +2185,7 @@ impl ModCollector<'_, '_> {
// scopes without eager expansion.
// Case 1: try to resolve macro calls with single-segment name and expand macro_rules
- if let Ok(res) = macro_call_as_call_id(
+ if let Ok(res) = macro_call_as_call_id_with_eager(
db.upcast(),
&ast_id,
mac.expand_to,
@@ -2205,19 +2208,34 @@ impl ModCollector<'_, '_> {
.map(|it| macro_id_to_def_id(self.def_collector.db, it))
})
},
- ) {
- // Legacy macros need to be expanded immediately, so that any macros they produce
- // are in scope.
- if let Some(val) = res {
- self.def_collector.collect_macro_expansion(
+ |path| {
+ let resolved_res = self.def_collector.def_map.resolve_path_fp_with_macro(
+ db,
+ ResolveMode::Other,
self.module_id,
- val,
- self.macro_depth + 1,
- container,
+ &path,
+ BuiltinShadowMode::Module,
+ Some(MacroSubNs::Bang),
);
- }
+ resolved_res.resolved_def.take_macros().map(|it| macro_id_to_def_id(db, it))
+ },
+ ) {
+ // FIXME: if there were errors, this might've been in the eager expansion from an
+ // unresolved macro, so we need to push this into late macro resolution. See the FIXME above.
+ if res.err.is_none() {
+ // Legacy macros need to be expanded immediately, so that any macros they produce
+ // are in scope.
+ if let Some(val) = res.value {
+ self.def_collector.collect_macro_expansion(
+ self.module_id,
+ val,
+ self.macro_depth + 1,
+ container,
+ );
+ }
- return;
+ return;
+ }
}
// Case 2: resolve in module scope, expand during name resolution.
@@ -2230,8 +2248,12 @@ impl ModCollector<'_, '_> {
}
fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) {
- let Some((source, target)) = Self::borrow_modules(self.def_collector.def_map.modules.as_mut(), module_id, self.module_id) else {
- return
+ let Some((source, target)) = Self::borrow_modules(
+ self.def_collector.def_map.modules.as_mut(),
+ module_id,
+ self.module_id,
+ ) else {
+ return;
};
for (name, macs) in source.scope.legacy_macros() {
@@ -2271,7 +2293,7 @@ impl ModCollector<'_, '_> {
fn emit_unconfigured_diagnostic(&mut self, item: ModItem, cfg: &CfgExpr) {
let ast_id = item.ast_id(self.item_tree);
- let ast_id = InFile::new(self.file_id(), ast_id.upcast());
+ let ast_id = InFile::new(self.file_id(), ast_id.erase());
self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id,
ast_id,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
index 18b424255..9cffb3c9f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
@@ -2,12 +2,9 @@
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
-use hir_expand::{attrs::AttrId, MacroCallKind};
+use hir_expand::{attrs::AttrId, ErasedAstId, MacroCallKind};
use la_arena::Idx;
-use syntax::{
- ast::{self, AnyHasAttrs},
- SyntaxError,
-};
+use syntax::{ast, SyntaxError};
use crate::{
item_tree::{self, ItemTreeId},
@@ -22,9 +19,9 @@ pub enum DefDiagnosticKind {
UnresolvedExternCrate { ast: AstId<ast::ExternCrate> },
- UnresolvedImport { id: ItemTreeId<item_tree::Import>, index: Idx<ast::UseTree> },
+ UnresolvedImport { id: ItemTreeId<item_tree::Use>, index: Idx<ast::UseTree> },
- UnconfiguredCode { ast: AstId<AnyHasAttrs>, cfg: CfgExpr, opts: CfgOptions },
+ UnconfiguredCode { ast: ErasedAstId, cfg: CfgExpr, opts: CfgOptions },
UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId },
@@ -73,7 +70,7 @@ impl DefDiagnostic {
pub(super) fn unresolved_import(
container: LocalModuleId,
- id: ItemTreeId<item_tree::Import>,
+ id: ItemTreeId<item_tree::Use>,
index: Idx<ast::UseTree>,
) -> Self {
Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } }
@@ -81,7 +78,7 @@ impl DefDiagnostic {
pub fn unconfigured_code(
container: LocalModuleId,
- ast: AstId<ast::AnyHasAttrs>,
+ ast: ErasedAstId,
cfg: CfgExpr,
opts: CfgOptions,
) -> Self {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
index 5f6163175..de22ea101 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
@@ -12,11 +12,12 @@
use base_db::Edition;
use hir_expand::name::Name;
+use triomphe::Arc;
use crate::{
db::DefDatabase,
item_scope::BUILTIN_SCOPE,
- nameres::{sub_namespace_match, BuiltinShadowMode, DefMap, MacroSubNs},
+ nameres::{sub_namespace_match, BlockInfo, BuiltinShadowMode, DefMap, MacroSubNs},
path::{ModPath, PathKind},
per_ns::PerNs,
visibility::{RawVisibility, Visibility},
@@ -159,13 +160,15 @@ impl DefMap {
(None, new) => new,
};
- match &current_map.block {
- Some(block) => {
+ match current_map.block {
+ Some(block) if original_module == Self::ROOT => {
+ // Block modules "inherit" names from their parent modules.
original_module = block.parent.local_id;
arc = block.parent.def_map(db, current_map.krate);
- current_map = &*arc;
+ current_map = &arc;
}
- None => return result,
+ // Proper (non-block) modules, including those in block `DefMap`s, don't.
+ _ => return result,
}
}
}
@@ -189,7 +192,7 @@ impl DefMap {
));
let mut segments = path.segments().iter().enumerate();
- let mut curr_per_ns: PerNs = match path.kind {
+ let mut curr_per_ns = match path.kind {
PathKind::DollarCrate(krate) => {
if krate == self.krate {
cov_mark::hit!(macro_dollar_crate_self);
@@ -241,51 +244,54 @@ impl DefMap {
)
}
PathKind::Super(lvl) => {
- let mut module = original_module;
- for i in 0..lvl {
- match self.modules[module].parent {
- Some(it) => module = it,
- None => match &self.block {
- Some(block) => {
- // Look up remaining path in parent `DefMap`
- let new_path = ModPath::from_segments(
- PathKind::Super(lvl - i),
- path.segments().to_vec(),
- );
- tracing::debug!(
- "`super` path: {} -> {} in parent map",
- path.display(db.upcast()),
- new_path.display(db.upcast())
- );
- return block
- .parent
- .def_map(db, self.krate)
- .resolve_path_fp_with_macro(
- db,
- mode,
- block.parent.local_id,
- &new_path,
- shadow,
- expected_macro_subns,
- );
- }
- None => {
- tracing::debug!("super path in root module");
- return ResolvePathResult::empty(ReachedFixedPoint::Yes);
- }
- },
- }
+ let mut local_id = original_module;
+ let mut ext;
+ let mut def_map = self;
+
+ // Adjust `local_id` to `self`, i.e. the nearest non-block module.
+ if def_map.module_id(local_id).is_block_module() {
+ (ext, local_id) = adjust_to_nearest_non_block_module(db, def_map, local_id);
+ def_map = &ext;
}
- // Resolve `self` to the containing crate-rooted module if we're a block
- self.with_ancestor_maps(db, module, &mut |def_map, module| {
- if def_map.block.is_some() {
- None // keep ascending
+ // Go up the module tree but skip block modules as `super` always refers to the
+ // nearest non-block module.
+ for _ in 0..lvl {
+ // Loop invariant: at the beginning of each loop, `local_id` must refer to a
+ // non-block module.
+ if let Some(parent) = def_map.modules[local_id].parent {
+ local_id = parent;
+ if def_map.module_id(local_id).is_block_module() {
+ (ext, local_id) =
+ adjust_to_nearest_non_block_module(db, def_map, local_id);
+ def_map = &ext;
+ }
} else {
- Some(PerNs::types(def_map.module_id(module).into(), Visibility::Public))
+ stdx::always!(def_map.block.is_none());
+ tracing::debug!("super path in root module");
+ return ResolvePathResult::empty(ReachedFixedPoint::Yes);
}
- })
- .expect("block DefMap not rooted in crate DefMap")
+ }
+
+ let module = def_map.module_id(local_id);
+ stdx::never!(module.is_block_module());
+
+ if self.block != def_map.block {
+ // If we have a different `DefMap` from `self` (the original `DefMap` we started
+ // with), resolve the remaining path segments in that `DefMap`.
+ let path =
+ ModPath::from_segments(PathKind::Super(0), path.segments().iter().cloned());
+ return def_map.resolve_path_fp_with_macro(
+ db,
+ mode,
+ local_id,
+ &path,
+ shadow,
+ expected_macro_subns,
+ );
+ }
+
+ PerNs::types(module.into(), Visibility::Public)
}
PathKind::Abs => {
// 2018-style absolute path -- only extern prelude
@@ -508,3 +514,27 @@ impl DefMap {
}
}
}
+
+/// Given a block module, returns its nearest non-block module and the `DefMap` it belongs to.
+fn adjust_to_nearest_non_block_module(
+ db: &dyn DefDatabase,
+ def_map: &DefMap,
+ mut local_id: LocalModuleId,
+) -> (Arc<DefMap>, LocalModuleId) {
+ // INVARIANT: `local_id` in `def_map` must be a block module.
+ stdx::always!(def_map.module_id(local_id).is_block_module());
+
+ let mut ext;
+ // This needs to be a local variable due to our mighty lifetime.
+ let mut def_map = def_map;
+ loop {
+ let BlockInfo { parent, .. } = def_map.block.expect("block module without parent module");
+
+ ext = parent.def_map(db, def_map.krate);
+ def_map = &ext;
+ local_id = parent.local_id;
+ if !parent.is_block_module() {
+ return (ext, local_id);
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
index 4931c36bb..40d3a1654 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
@@ -213,17 +213,17 @@ pub type Ty = ();
for (_, res) in module_data.scope.resolutions() {
match res.values.or(res.types).unwrap().0 {
- ModuleDefId::FunctionId(f) => drop(db.function_data(f)),
+ ModuleDefId::FunctionId(f) => _ = db.function_data(f),
ModuleDefId::AdtId(adt) => match adt {
- AdtId::StructId(it) => drop(db.struct_data(it)),
- AdtId::UnionId(it) => drop(db.union_data(it)),
- AdtId::EnumId(it) => drop(db.enum_data(it)),
+ AdtId::StructId(it) => _ = db.struct_data(it),
+ AdtId::UnionId(it) => _ = db.union_data(it),
+ AdtId::EnumId(it) => _ = db.enum_data(it),
},
- ModuleDefId::ConstId(it) => drop(db.const_data(it)),
- ModuleDefId::StaticId(it) => drop(db.static_data(it)),
- ModuleDefId::TraitId(it) => drop(db.trait_data(it)),
- ModuleDefId::TraitAliasId(it) => drop(db.trait_alias_data(it)),
- ModuleDefId::TypeAliasId(it) => drop(db.type_alias_data(it)),
+ ModuleDefId::ConstId(it) => _ = db.const_data(it),
+ ModuleDefId::StaticId(it) => _ = db.static_data(it),
+ ModuleDefId::TraitId(it) => _ = db.trait_data(it),
+ ModuleDefId::TraitAliasId(it) => _ = db.trait_alias_data(it),
+ ModuleDefId::TypeAliasId(it) => _ = db.type_alias_data(it),
ModuleDefId::EnumVariantId(_)
| ModuleDefId::ModuleId(_)
| ModuleDefId::MacroId(_)
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
index ff4ae6954..06530cc7e 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
@@ -45,7 +45,7 @@ pub enum Path {
/// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`.
generic_args: Option<Box<[Option<Interned<GenericArgs>>]>>,
},
- /// A link to a lang item. It is used in desugaring of things like `x?`. We can show these
+ /// A link to a lang item. It is used in desugaring of things like `it?`. We can show these
/// links via a normal path since they might be private and not accessible in the usage place.
LangItem(LangItemTarget),
}
@@ -135,10 +135,7 @@ impl Path {
pub fn segments(&self) -> PathSegments<'_> {
let Path::Normal { mod_path, generic_args, .. } = self else {
- return PathSegments {
- segments: &[],
- generic_args: None,
- };
+ return PathSegments { segments: &[], generic_args: None };
};
let s =
PathSegments { segments: mod_path.segments(), generic_args: generic_args.as_deref() };
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
index 1cb17ff0d..abd817893 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
@@ -74,8 +74,8 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => {
let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
- Path::from_src(trait_ref.path()?, ctx)? else
- {
+ Path::from_src(trait_ref.path()?, ctx)?
+ else {
return None;
};
let num_segments = mod_path.segments().len();
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs
index 0aead6f37..11d58a6ba 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs
@@ -12,8 +12,8 @@ use crate::{
};
pub(crate) fn print_path(db: &dyn ExpandDatabase, path: &Path, buf: &mut dyn Write) -> fmt::Result {
- if let Path::LangItem(x) = path {
- return write!(buf, "$lang_item::{x:?}");
+ if let Path::LangItem(it) = path {
+ return write!(buf, "$lang_item::{it:?}");
}
match path.type_anchor() {
Some(anchor) => {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
index 0d6f55411..b112c1070 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
@@ -22,10 +22,10 @@ use crate::{
per_ns::PerNs,
visibility::{RawVisibility, Visibility},
AdtId, AssocItemId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId,
- EnumVariantId, ExternBlockId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId,
- ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId, MacroRulesId,
- ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
- TypeOrConstParamId, TypeOwnerId, TypeParamId, VariantId,
+ EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId,
+ HasModule, ImplId, ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId,
+ MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId,
+ TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId,
};
#[derive(Debug, Clone)]
@@ -186,12 +186,12 @@ impl Resolver {
Path::LangItem(l) => {
return Some((
match *l {
- LangItemTarget::Union(x) => TypeNs::AdtId(x.into()),
- LangItemTarget::TypeAlias(x) => TypeNs::TypeAliasId(x),
- LangItemTarget::Struct(x) => TypeNs::AdtId(x.into()),
- LangItemTarget::EnumVariant(x) => TypeNs::EnumVariantId(x),
- LangItemTarget::EnumId(x) => TypeNs::AdtId(x.into()),
- LangItemTarget::Trait(x) => TypeNs::TraitId(x),
+ LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
+ LangItemTarget::TypeAlias(it) => TypeNs::TypeAliasId(it),
+ LangItemTarget::Struct(it) => TypeNs::AdtId(it.into()),
+ LangItemTarget::EnumVariant(it) => TypeNs::EnumVariantId(it),
+ LangItemTarget::EnumId(it) => TypeNs::AdtId(it.into()),
+ LangItemTarget::Trait(it) => TypeNs::TraitId(it),
LangItemTarget::Function(_)
| LangItemTarget::ImplDef(_)
| LangItemTarget::Static(_) => return None,
@@ -273,10 +273,10 @@ impl Resolver {
Path::Normal { mod_path, .. } => mod_path,
Path::LangItem(l) => {
return Some(ResolveValueResult::ValueNs(match *l {
- LangItemTarget::Function(x) => ValueNs::FunctionId(x),
- LangItemTarget::Static(x) => ValueNs::StaticId(x),
- LangItemTarget::Struct(x) => ValueNs::StructId(x),
- LangItemTarget::EnumVariant(x) => ValueNs::EnumVariantId(x),
+ LangItemTarget::Function(it) => ValueNs::FunctionId(it),
+ LangItemTarget::Static(it) => ValueNs::StaticId(it),
+ LangItemTarget::Struct(it) => ValueNs::StructId(it),
+ LangItemTarget::EnumVariant(it) => ValueNs::EnumVariantId(it),
LangItemTarget::Union(_)
| LangItemTarget::ImplDef(_)
| LangItemTarget::TypeAlias(_)
@@ -425,14 +425,14 @@ impl Resolver {
/// The shadowing is accounted for: in
///
/// ```
- /// let x = 92;
+ /// let it = 92;
/// {
- /// let x = 92;
+ /// let it = 92;
/// $0
/// }
/// ```
///
- /// there will be only one entry for `x` in the result.
+ /// there will be only one entry for `it` in the result.
///
/// The result is ordered *roughly* from the innermost scope to the
/// outermost: when the name is introduced in two namespaces in two scopes,
@@ -1018,20 +1018,32 @@ impl HasResolver for ExternBlockId {
}
}
+impl HasResolver for ExternCrateId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
+
+impl HasResolver for UseId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
+
impl HasResolver for TypeOwnerId {
fn resolver(self, db: &dyn DefDatabase) -> Resolver {
match self {
- TypeOwnerId::FunctionId(x) => x.resolver(db),
- TypeOwnerId::StaticId(x) => x.resolver(db),
- TypeOwnerId::ConstId(x) => x.resolver(db),
- TypeOwnerId::InTypeConstId(x) => x.lookup(db).owner.resolver(db),
- TypeOwnerId::AdtId(x) => x.resolver(db),
- TypeOwnerId::TraitId(x) => x.resolver(db),
- TypeOwnerId::TraitAliasId(x) => x.resolver(db),
- TypeOwnerId::TypeAliasId(x) => x.resolver(db),
- TypeOwnerId::ImplId(x) => x.resolver(db),
- TypeOwnerId::EnumVariantId(x) => x.resolver(db),
- TypeOwnerId::ModuleId(x) => x.resolver(db),
+ TypeOwnerId::FunctionId(it) => it.resolver(db),
+ TypeOwnerId::StaticId(it) => it.resolver(db),
+ TypeOwnerId::ConstId(it) => it.resolver(db),
+ TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.resolver(db),
+ TypeOwnerId::AdtId(it) => it.resolver(db),
+ TypeOwnerId::TraitId(it) => it.resolver(db),
+ TypeOwnerId::TraitAliasId(it) => it.resolver(db),
+ TypeOwnerId::TypeAliasId(it) => it.resolver(db),
+ TypeOwnerId::ImplId(it) => it.resolver(db),
+ TypeOwnerId::EnumVariantId(it) => it.resolver(db),
+ TypeOwnerId::ModuleId(it) => it.resolver(db),
}
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
index 40d8659f2..1f27204c1 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -16,11 +16,9 @@ cov-mark = "2.0.0-pre.1"
tracing = "0.1.35"
either = "1.7.0"
rustc-hash = "1.1.0"
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+la-arena.workspace = true
itertools = "0.10.5"
-hashbrown = { version = "0.12.1", features = [
- "inline-more",
-], default-features = false }
+hashbrown.workspace = true
smallvec.workspace = true
triomphe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
index c2b0d5985..1906ed15b 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
@@ -18,47 +18,89 @@ use rustc_hash::FxHasher;
use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
/// `AstId` points to an AST node in a specific file.
-pub struct FileAstId<N: AstNode> {
+pub struct FileAstId<N: AstIdNode> {
raw: ErasedFileAstId,
covariant: PhantomData<fn() -> N>,
}
-impl<N: AstNode> Clone for FileAstId<N> {
+impl<N: AstIdNode> Clone for FileAstId<N> {
fn clone(&self) -> FileAstId<N> {
*self
}
}
-impl<N: AstNode> Copy for FileAstId<N> {}
+impl<N: AstIdNode> Copy for FileAstId<N> {}
-impl<N: AstNode> PartialEq for FileAstId<N> {
+impl<N: AstIdNode> PartialEq for FileAstId<N> {
fn eq(&self, other: &Self) -> bool {
self.raw == other.raw
}
}
-impl<N: AstNode> Eq for FileAstId<N> {}
-impl<N: AstNode> Hash for FileAstId<N> {
+impl<N: AstIdNode> Eq for FileAstId<N> {}
+impl<N: AstIdNode> Hash for FileAstId<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.raw.hash(hasher);
}
}
-impl<N: AstNode> fmt::Debug for FileAstId<N> {
+impl<N: AstIdNode> fmt::Debug for FileAstId<N> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw())
}
}
-impl<N: AstNode> FileAstId<N> {
+impl<N: AstIdNode> FileAstId<N> {
// Can't make this a From implementation because of coherence
- pub fn upcast<M: AstNode>(self) -> FileAstId<M>
+ pub fn upcast<M: AstIdNode>(self) -> FileAstId<M>
where
N: Into<M>,
{
FileAstId { raw: self.raw, covariant: PhantomData }
}
+
+ pub fn erase(self) -> ErasedFileAstId {
+ self.raw
+ }
}
-type ErasedFileAstId = Idx<SyntaxNodePtr>;
+pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
+
+pub trait AstIdNode: AstNode {}
+macro_rules! register_ast_id_node {
+ (impl AstIdNode for $($ident:ident),+ ) => {
+ $(
+ impl AstIdNode for ast::$ident {}
+ )+
+ fn should_alloc_id(kind: syntax::SyntaxKind) -> bool {
+ $(
+ ast::$ident::can_cast(kind)
+ )||+
+ }
+ };
+}
+register_ast_id_node! {
+ impl AstIdNode for
+ Item,
+ Adt,
+ Enum,
+ Struct,
+ Union,
+ Const,
+ ExternBlock,
+ ExternCrate,
+ Fn,
+ Impl,
+ Macro,
+ MacroDef,
+ MacroRules,
+ MacroCall,
+ Module,
+ Static,
+ Trait,
+ TraitAlias,
+ TypeAlias,
+ Use,
+ AssocItem, BlockExpr, Variant, RecordField, TupleField, ConstArg
+}
/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
#[derive(Default)]
@@ -92,14 +134,7 @@ impl AstIdMap {
// change parent's id. This means that, say, adding a new function to a
// trait does not change ids of top-level items, which helps caching.
bdfs(node, |it| {
- let kind = it.kind();
- if ast::Item::can_cast(kind)
- || ast::BlockExpr::can_cast(kind)
- || ast::Variant::can_cast(kind)
- || ast::RecordField::can_cast(kind)
- || ast::TupleField::can_cast(kind)
- || ast::ConstArg::can_cast(kind)
- {
+ if should_alloc_id(it.kind()) {
res.alloc(&it);
true
} else {
@@ -120,15 +155,19 @@ impl AstIdMap {
res
}
- pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
+ pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
let raw = self.erased_ast_id(item.syntax());
FileAstId { raw, covariant: PhantomData }
}
- pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
+ pub fn get<N: AstIdNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
}
+ pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
+ self.arena[id].clone()
+ }
+
fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
let ptr = SyntaxNodePtr::new(item);
let hash = hash_ptr(&ptr);
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
index 80695bc06..4ee12e2f2 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
@@ -35,7 +35,7 @@ macro_rules! register_builtin {
impl BuiltinAttrExpander {
pub fn is_derive(self) -> bool {
- matches!(self, BuiltinAttrExpander::Derive)
+ matches!(self, BuiltinAttrExpander::Derive | BuiltinAttrExpander::DeriveConst)
}
pub fn is_test(self) -> bool {
matches!(self, BuiltinAttrExpander::Test)
@@ -50,6 +50,8 @@ register_builtin! {
(cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand,
(derive, Derive) => derive_attr_expand,
+ // derive const is equivalent to derive for our proposes.
+ (derive_const, DeriveConst) => derive_attr_expand,
(global_allocator, GlobalAllocator) => dummy_attr_expand,
(test, Test) => dummy_attr_expand,
(test_case, TestCase) => dummy_attr_expand
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
index 3d1e272b9..ecc8b407a 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
@@ -12,9 +12,7 @@ use crate::{
name::{AsName, Name},
tt::{self, TokenId},
};
-use syntax::ast::{
- self, AstNode, FieldList, HasAttrs, HasGenericParams, HasModuleItem, HasName, HasTypeBounds,
-};
+use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
@@ -30,12 +28,13 @@ macro_rules! register_builtin {
&self,
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ token_map: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( BuiltinDeriveExpander::$trait => $expand, )*
};
- expander(db, id, tt)
+ expander(db, id, tt, token_map)
}
fn find_by_name(name: &name::Name) -> Option<Self> {
@@ -72,12 +71,12 @@ enum VariantShape {
}
fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
- (0..n).map(|x| Ident::new(format!("f{x}"), tt::TokenId::unspecified()))
+ (0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
}
impl VariantShape {
fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree {
- self.as_pattern_map(path, |x| quote!(#x))
+ self.as_pattern_map(path, |it| quote!(#it))
}
fn field_names(&self) -> Vec<tt::Ident> {
@@ -95,17 +94,17 @@ impl VariantShape {
) -> tt::Subtree {
match self {
VariantShape::Struct(fields) => {
- let fields = fields.iter().map(|x| {
- let mapped = field_map(x);
- quote! { #x : #mapped , }
+ let fields = fields.iter().map(|it| {
+ let mapped = field_map(it);
+ quote! { #it : #mapped , }
});
quote! {
#path { ##fields }
}
}
&VariantShape::Tuple(n) => {
- let fields = tuple_field_iterator(n).map(|x| {
- let mapped = field_map(&x);
+ let fields = tuple_field_iterator(n).map(|it| {
+ let mapped = field_map(&it);
quote! {
#mapped ,
}
@@ -118,16 +117,16 @@ impl VariantShape {
}
}
- fn from(value: Option<FieldList>, token_map: &TokenMap) -> Result<Self, ExpandError> {
+ fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
let r = match value {
None => VariantShape::Unit,
- Some(FieldList::RecordFieldList(x)) => VariantShape::Struct(
- x.fields()
- .map(|x| x.name())
- .map(|x| name_to_token(token_map, x))
+ Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
+ it.fields()
+ .map(|it| it.name())
+ .map(|it| name_to_token(tm, it))
.collect::<Result<_, _>>()?,
),
- Some(FieldList::TupleFieldList(x)) => VariantShape::Tuple(x.fields().count()),
+ Some(FieldList::TupleFieldList(it)) => VariantShape::Tuple(it.fields().count()),
};
Ok(r)
}
@@ -141,7 +140,7 @@ enum AdtShape {
impl AdtShape {
fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> {
- self.as_pattern_map(name, |x| quote!(#x))
+ self.as_pattern_map(name, |it| quote!(#it))
}
fn field_names(&self) -> Vec<Vec<tt::Ident>> {
@@ -190,32 +189,19 @@ struct BasicAdtInfo {
associated_types: Vec<tt::Subtree>,
}
-fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
- let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
- let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
- debug!("derive node didn't parse");
- ExpandError::other("invalid item definition")
- })?;
- let item = macro_items.items().next().ok_or_else(|| {
- debug!("no module item parsed");
- ExpandError::other("no item found")
- })?;
- let adt = ast::Adt::cast(item.syntax().clone()).ok_or_else(|| {
- debug!("expected adt, found: {:?}", item);
- ExpandError::other("expected struct, enum or union")
- })?;
+fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, shape) = match &adt {
ast::Adt::Struct(it) => (
it.name(),
it.generic_param_list(),
- AdtShape::Struct(VariantShape::from(it.field_list(), &token_map)?),
+ AdtShape::Struct(VariantShape::from(tm, it.field_list())?),
),
ast::Adt::Enum(it) => {
let default_variant = it
.variant_list()
.into_iter()
- .flat_map(|x| x.variants())
- .position(|x| x.attrs().any(|x| x.simple_name() == Some("default".into())));
+ .flat_map(|it| it.variants())
+ .position(|it| it.attrs().any(|it| it.simple_name() == Some("default".into())));
(
it.name(),
it.generic_param_list(),
@@ -224,11 +210,11 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
variants: it
.variant_list()
.into_iter()
- .flat_map(|x| x.variants())
- .map(|x| {
+ .flat_map(|it| it.variants())
+ .map(|it| {
Ok((
- name_to_token(&token_map, x.name())?,
- VariantShape::from(x.field_list(), &token_map)?,
+ name_to_token(tm, it.name())?,
+ VariantShape::from(tm, it.field_list())?,
))
})
.collect::<Result<_, ExpandError>>()?,
@@ -246,16 +232,16 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
let name = {
let this = param.name();
match this {
- Some(x) => {
- param_type_set.insert(x.as_name());
- mbe::syntax_node_to_token_tree(x.syntax()).0
+ Some(it) => {
+ param_type_set.insert(it.as_name());
+ mbe::syntax_node_to_token_tree(it.syntax()).0
}
None => tt::Subtree::empty(),
}
};
let bounds = match &param {
- ast::TypeOrConstParam::Type(x) => {
- x.type_bound_list().map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0)
+ ast::TypeOrConstParam::Type(it) => {
+ it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
}
ast::TypeOrConstParam::Const(_) => None,
};
@@ -296,9 +282,9 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
param_type_set.contains(&name).then_some(p)
})
- .map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0)
+ .map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
.collect();
- let name_token = name_to_token(&token_map, name)?;
+ let name_token = name_to_token(&tm, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
}
@@ -345,11 +331,12 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
/// therefore does not get bound by the derived trait.
fn expand_simple_derive(
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
trait_path: tt::Subtree,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
) -> ExpandResult<tt::Subtree> {
- let info = match parse_adt(tt) {
+ let info = match parse_adt(tm, tt) {
Ok(info) => info,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
};
@@ -373,10 +360,10 @@ fn expand_simple_derive(
})
.unzip();
- where_block.extend(info.associated_types.iter().map(|x| {
- let x = x.clone();
+ where_block.extend(info.associated_types.iter().map(|it| {
+ let it = it.clone();
let bound = trait_path.clone();
- quote! { #x : #bound , }
+ quote! { #it : #bound , }
}));
let name = info.name;
@@ -405,19 +392,21 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
fn copy_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::marker::Copy }, |_| quote! {})
+ expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
}
fn clone_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::clone::Clone }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct {
char: '*',
@@ -444,7 +433,7 @@ fn clone_expand(
}
let name = &adt.name;
let patterns = adt.shape.as_pattern(name);
- let exprs = adt.shape.as_pattern_map(name, |x| quote! { #x .clone() });
+ let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() });
let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
let fat_arrow = fat_arrow();
quote! {
@@ -479,10 +468,11 @@ fn and_and() -> ::tt::Subtree<TokenId> {
fn default_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::default::Default }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| {
let body = match &adt.shape {
AdtShape::Struct(fields) => {
let name = &adt.name;
@@ -518,16 +508,17 @@ fn default_expand(
fn debug_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::fmt::Debug }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
- let for_fields = fields.iter().map(|x| {
- let x_string = x.to_string();
+ let for_fields = fields.iter().map(|it| {
+ let x_string = it.to_string();
quote! {
- .field(#x_string, & #x)
+ .field(#x_string, & #it)
}
});
quote! {
@@ -535,9 +526,9 @@ fn debug_expand(
}
}
VariantShape::Tuple(n) => {
- let for_fields = tuple_field_iterator(*n).map(|x| {
+ let for_fields = tuple_field_iterator(*n).map(|it| {
quote! {
- .field( & #x)
+ .field( & #it)
}
});
quote! {
@@ -598,10 +589,11 @@ fn debug_expand(
fn hash_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::hash::Hash }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {};
@@ -621,7 +613,7 @@ fn hash_expand(
let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
|(pat, names)| {
let expr = {
- let it = names.iter().map(|x| quote! { #x . hash(ra_expand_state); });
+ let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); });
quote! { {
##it
} }
@@ -632,9 +624,14 @@ fn hash_expand(
}
},
);
+ let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) {
+ quote! { #krate::mem::discriminant(self).hash(ra_expand_state); }
+ } else {
+ quote! {}
+ };
quote! {
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
- #krate::mem::discriminant(self).hash(ra_expand_state);
+ #check_discriminant
match self {
##arms
}
@@ -646,19 +643,21 @@ fn hash_expand(
fn eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::Eq }, |_| quote! {})
+ expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {})
}
fn partial_eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::PartialEq }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {};
@@ -674,9 +673,9 @@ fn partial_eq_expand(
quote!(true)
}
[first, rest @ ..] => {
- let rest = rest.iter().map(|x| {
- let t1 = Ident::new(format!("{}_self", x.text), x.span);
- let t2 = Ident::new(format!("{}_other", x.text), x.span);
+ let rest = rest.iter().map(|it| {
+ let t1 = Ident::new(format!("{}_self", it.text), it.span);
+ let t2 = Ident::new(format!("{}_other", it.text), it.span);
let and_and = and_and();
quote!(#and_and #t1 .eq( #t2 ))
});
@@ -708,12 +707,12 @@ fn self_and_other_patterns(
adt: &BasicAdtInfo,
name: &tt::Ident,
) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
- let self_patterns = adt.shape.as_pattern_map(name, |x| {
- let t = Ident::new(format!("{}_self", x.text), x.span);
+ let self_patterns = adt.shape.as_pattern_map(name, |it| {
+ let t = Ident::new(format!("{}_self", it.text), it.span);
quote!(#t)
});
- let other_patterns = adt.shape.as_pattern_map(name, |x| {
- let t = Ident::new(format!("{}_other", x.text), x.span);
+ let other_patterns = adt.shape.as_pattern_map(name, |it| {
+ let t = Ident::new(format!("{}_other", it.text), it.span);
quote!(#t)
});
(self_patterns, other_patterns)
@@ -722,10 +721,11 @@ fn self_and_other_patterns(
fn ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::Ord }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
@@ -747,9 +747,6 @@ fn ord_expand(
// FIXME: Return expand error here
return quote!();
}
- let left = quote!(#krate::intrinsics::discriminant_value(self));
- let right = quote!(#krate::intrinsics::discriminant_value(other));
-
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
|(pat1, pat2, fields)| {
@@ -764,17 +761,17 @@ fn ord_expand(
},
);
let fat_arrow = fat_arrow();
- let body = compare(
- krate,
- left,
- right,
- quote! {
- match (self, other) {
- ##arms
- _unused #fat_arrow #krate::cmp::Ordering::Equal
- }
- },
- );
+ let mut body = quote! {
+ match (self, other) {
+ ##arms
+ _unused #fat_arrow #krate::cmp::Ordering::Equal
+ }
+ };
+ if matches!(&adt.shape, AdtShape::Enum { .. }) {
+ let left = quote!(#krate::intrinsics::discriminant_value(self));
+ let right = quote!(#krate::intrinsics::discriminant_value(other));
+ body = compare(krate, left, right, body);
+ }
quote! {
fn cmp(&self, other: &Self) -> #krate::cmp::Ordering {
#body
@@ -786,10 +783,11 @@ fn ord_expand(
fn partial_ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
index a9f0c154b..95c6baf42 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
@@ -339,7 +339,7 @@ fn format_args_expand_general(
parts.push(mem::take(&mut last_part));
let arg_tree = if argument.is_empty() {
match args.next() {
- Some(x) => x,
+ Some(it) => it,
None => {
err = Some(mbe::ExpandError::NoMatchingRule.into());
tt::Subtree::empty()
@@ -361,7 +361,7 @@ fn format_args_expand_general(
quote!(::core::fmt::Display::fmt)
}
};
- arg_tts.push(quote! { ::core::fmt::Argument::new(&(#arg_tree), #formatter), });
+ arg_tts.push(quote! { ::core::fmt::ArgumentV1::new(&(#arg_tree), #formatter), });
}
'}' => {
if format_iter.peek() == Some(&'}') {
@@ -378,11 +378,11 @@ fn format_args_expand_general(
if !last_part.is_empty() {
parts.push(last_part);
}
- let part_tts = parts.into_iter().map(|x| {
+ let part_tts = parts.into_iter().map(|it| {
let text = if let Some(raw) = &raw_sharps {
- format!("r{raw}\"{}\"{raw}", x).into()
+ format!("r{raw}\"{}\"{raw}", it).into()
} else {
- format!("\"{}\"", x).into()
+ format!("\"{}\"", it).into()
};
let l = tt::Literal { span: tt::TokenId::unspecified(), text };
quote!(#l ,)
@@ -574,7 +574,7 @@ fn concat_bytes_expand(
syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()),
syntax::SyntaxKind::BYTE_STRING => {
let components = unquote_byte_string(lit).unwrap_or_default();
- components.into_iter().for_each(|x| bytes.push(x.to_string()));
+ components.into_iter().for_each(|it| bytes.push(it.to_string()));
}
_ => {
err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
@@ -692,7 +692,7 @@ pub(crate) fn include_arg_to_tt(
arg_id: MacroCallId,
) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
let loc = db.lookup_intern_macro_call(arg_id);
- let Some(EagerCallInfo {arg, arg_id: Some(arg_id), .. }) = loc.eager.as_deref() else {
+ let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
};
let path = parse_string(&arg.0)?;
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
index 78b2db730..5292a5fa1 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -1,9 +1,9 @@
//! Defines database & queries for macro expansion.
-use base_db::{salsa, Edition, SourceDatabase};
+use base_db::{salsa, CrateId, Edition, SourceDatabase};
use either::Either;
use limit::Limit;
-use mbe::syntax_node_to_token_tree;
+use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, HasAttrs, HasDocComments},
@@ -13,7 +13,7 @@ use triomphe::Arc;
use crate::{
ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
- builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, BuiltinAttrExpander,
+ builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
MacroDefKind, MacroFile, ProcMacroExpander,
@@ -28,61 +28,67 @@ use crate::{
static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)]
+/// Old-style `macro_rules` or the new macros 2.0
+pub struct DeclarativeMacroExpander {
+ pub mac: mbe::DeclarativeMacro,
+ pub def_site_token_map: mbe::TokenMap,
+}
+
+impl DeclarativeMacroExpander {
+ pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
+ match self.mac.err() {
+ Some(e) => ExpandResult::new(
+ tt::Subtree::empty(),
+ ExpandError::other(format!("invalid macro definition: {e}")),
+ ),
+ None => self.mac.expand(tt).map_err(Into::into),
+ }
+ }
+
+ pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
+ self.mac.map_id_down(token_id)
+ }
+
+ pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
+ self.mac.map_id_up(token_id)
+ }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
- /// Old-style `macro_rules` or the new macros 2.0
- DeclarativeMacro { mac: mbe::DeclarativeMacro, def_site_token_map: mbe::TokenMap },
+ DeclarativeMacro(Arc<DeclarativeMacroExpander>),
/// Stuff like `line!` and `file!`.
- Builtin(BuiltinFnLikeExpander),
+ BuiltIn(BuiltinFnLikeExpander),
/// Built-in eagerly expanded fn-like macros (`include!`, `concat!`, etc.)
- BuiltinEager(EagerExpander),
+ BuiltInEager(EagerExpander),
/// `global_allocator` and such.
- BuiltinAttr(BuiltinAttrExpander),
+ BuiltInAttr(BuiltinAttrExpander),
/// `derive(Copy)` and such.
- BuiltinDerive(BuiltinDeriveExpander),
+ BuiltInDerive(BuiltinDeriveExpander),
/// The thing we love the most here in rust-analyzer -- procedural macros.
ProcMacro(ProcMacroExpander),
}
+// FIXME: Get rid of these methods
impl TokenExpander {
- fn expand(
- &self,
- db: &dyn ExpandDatabase,
- id: MacroCallId,
- tt: &tt::Subtree,
- ) -> ExpandResult<tt::Subtree> {
- match self {
- TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt).map_err(Into::into),
- TokenExpander::Builtin(it) => it.expand(db, id, tt).map_err(Into::into),
- TokenExpander::BuiltinEager(it) => it.expand(db, id, tt).map_err(Into::into),
- TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt),
- TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
- TokenExpander::ProcMacro(_) => {
- // We store the result in salsa db to prevent non-deterministic behavior in
- // some proc-macro implementation
- // See #4315 for details
- db.expand_proc_macro(id)
- }
- }
- }
-
pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
match self {
- TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_down(id),
- TokenExpander::Builtin(..)
- | TokenExpander::BuiltinEager(..)
- | TokenExpander::BuiltinAttr(..)
- | TokenExpander::BuiltinDerive(..)
+ TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
+ TokenExpander::BuiltIn(..)
+ | TokenExpander::BuiltInEager(..)
+ | TokenExpander::BuiltInAttr(..)
+ | TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => id,
}
}
pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
match self {
- TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_up(id),
- TokenExpander::Builtin(..)
- | TokenExpander::BuiltinEager(..)
- | TokenExpander::BuiltinAttr(..)
- | TokenExpander::BuiltinDerive(..)
+ TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
+ TokenExpander::BuiltIn(..)
+ | TokenExpander::BuiltInEager(..)
+ | TokenExpander::BuiltInAttr(..)
+ | TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
}
}
@@ -118,14 +124,26 @@ pub trait ExpandDatabase: SourceDatabase {
fn macro_arg(
&self,
id: MacroCallId,
- ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>;
+ ) -> ValueResult<
+ Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
+ Arc<Box<[SyntaxError]>>,
+ >;
/// Extracts syntax node, corresponding to a macro call. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
- fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
- /// Gets the expander for this macro. This compiles declarative macros, and
- /// just fetches procedural ones.
- fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>;
+ fn macro_arg_node(
+ &self,
+ id: MacroCallId,
+ ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
+ /// Fetches the expander for this macro.
+ #[salsa::transparent]
+ fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
+ /// Fetches (and compiles) the expander of this decl macro.
+ fn decl_macro_expander(
+ &self,
+ def_crate: CrateId,
+ id: AstId<ast::Macro>,
+ ) -> Arc<DeclarativeMacroExpander>;
/// Expand macro call to a token tree.
// This query is LRU cached
@@ -141,8 +159,8 @@ pub trait ExpandDatabase: SourceDatabase {
/// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way.
- /// @edwin0cheng heroically debugged this once!
- fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<tt::Subtree>;
+ /// @edwin0cheng heroically debugged this once! See #4315 for details
+ fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
/// Firewall query that returns the errors from the `parse_macro_expansion` query.
fn parse_macro_expansion_error(
&self,
@@ -163,7 +181,6 @@ pub fn expand_speculative(
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call);
- let macro_def = db.macro_def(loc.def).ok()?;
let token_range = token_to_map.text_range();
// Build the subtree and token mapping for the speculative args
@@ -221,7 +238,12 @@ pub fn expand_speculative(
None => {
let range = token_range.checked_sub(speculative_args.text_range().start())?;
let token_id = spec_args_tmap.token_by_range(range)?;
- macro_def.map_id_down(token_id)
+ match loc.def.kind {
+ MacroDefKind::Declarative(it) => {
+ db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
+ }
+ _ => token_id,
+ }
}
};
@@ -235,7 +257,17 @@ pub fn expand_speculative(
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
}
- _ => macro_def.expand(db, actual_macro_call, &tt),
+ MacroDefKind::BuiltInDerive(expander, ..) => {
+ // this cast is a bit sus, can we avoid losing the typedness here?
+ let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
+ expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
+ }
+ MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
+ MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
+ MacroDefKind::BuiltInEager(it, _) => {
+ it.expand(db, actual_macro_call, &tt).map_err(Into::into)
+ }
+ MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
};
let expand_to = macro_expand_to(db, actual_macro_call);
@@ -297,17 +329,31 @@ fn parse_macro_expansion(
ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
}
+fn parse_macro_expansion_error(
+ db: &dyn ExpandDatabase,
+ macro_call_id: MacroCallId,
+) -> ExpandResult<Box<[SyntaxError]>> {
+ db.parse_macro_expansion(MacroFile { macro_call_id })
+ .map(|it| it.0.errors().to_vec().into_boxed_slice())
+}
+
fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
-) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
+) -> ValueResult<
+ Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
+ Arc<Box<[SyntaxError]>>,
+> {
let loc = db.lookup_intern_macro_call(id);
- if let Some(EagerCallInfo { arg, arg_id: Some(_), error: _ }) = loc.eager.as_deref() {
- return Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default())));
+ if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
+ return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
}
- let arg = db.macro_arg_text(id)?;
+ let ValueResult { value, err } = db.macro_arg_node(id);
+ let Some(arg) = value else {
+ return ValueResult { value: None, err };
+ };
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
@@ -325,9 +371,16 @@ fn macro_arg(
// proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter = tt::Delimiter::unspecified();
}
- Some(Arc::new((tt, tmap, fixups.undo_info)))
+ let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
+ match err {
+ Some(err) => ValueResult::new(val, err),
+ None => ValueResult::ok(val),
+ }
}
+/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
+/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
+/// - attributes expect the invoking attribute to be stripped
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
// FIXME: handle `cfg_attr`
(|| {
@@ -364,9 +417,43 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.unwrap_or_default()
}
-fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode> {
+fn macro_arg_node(
+ db: &dyn ExpandDatabase,
+ id: MacroCallId,
+) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
+ let err = || -> Arc<Box<[_]>> {
+ Arc::new(Box::new([SyntaxError::new_at_offset(
+ "invalid macro call".to_owned(),
+ syntax::TextSize::from(0),
+ )]))
+ };
let loc = db.lookup_intern_macro_call(id);
- let arg = loc.kind.arg(db)?;
+ let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
+ let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
+ Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
+ } else {
+ loc.kind
+ .arg(db)
+ .and_then(|arg| ast::TokenTree::cast(arg.value))
+ .map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
+ };
+ match res {
+ Some(res) if res.errors().is_empty() => res.syntax_node(),
+ Some(res) => {
+ return ValueResult::new(
+ Some(res.syntax_node().green().into()),
+ // Box::<[_]>::from(res.errors()), not stable yet
+ Arc::new(res.errors().to_vec().into_boxed_slice()),
+ );
+ }
+ None => return ValueResult::only_err(err()),
+ }
+ } else {
+ match loc.kind.arg(db) {
+ Some(res) => res.value,
+ None => return ValueResult::only_err(err()),
+ }
+ };
if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
@@ -381,101 +468,146 @@ fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode>
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
- return None;
+ return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
+ "unbalanced token tree".to_owned(),
+ arg.text_range(),
+ )])));
}
}
- if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
- Some(
- mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr)
- .0
- .syntax_node()
- .green()
- .into(),
- )
- } else {
- Some(arg.green().into())
- }
+ ValueResult::ok(Some(arg.green().into()))
}
-fn macro_def(
+fn decl_macro_expander(
db: &dyn ExpandDatabase,
- id: MacroDefId,
-) -> Result<Arc<TokenExpander>, mbe::ParseError> {
+ def_crate: CrateId,
+ id: AstId<ast::Macro>,
+) -> Arc<DeclarativeMacroExpander> {
+ let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
+ let (mac, def_site_token_map) = match id.to_node(db) {
+ ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
+ Some(arg) => {
+ let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+ let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
+ (mac, def_site_token_map)
+ }
+ None => (
+ mbe::DeclarativeMacro::from_err(
+ mbe::ParseError::Expected("expected a token tree".into()),
+ is_2021,
+ ),
+ Default::default(),
+ ),
+ },
+ ast::Macro::MacroDef(macro_def) => match macro_def.body() {
+ Some(arg) => {
+ let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+ let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
+ (mac, def_site_token_map)
+ }
+ None => (
+ mbe::DeclarativeMacro::from_err(
+ mbe::ParseError::Expected("expected a token tree".into()),
+ is_2021,
+ ),
+ Default::default(),
+ ),
+ },
+ };
+ Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
+}
+
+fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
match id.kind {
MacroDefKind::Declarative(ast_id) => {
- let is_2021 = db.crate_graph()[id.krate].edition >= Edition::Edition2021;
- let (mac, def_site_token_map) = match ast_id.to_node(db) {
- ast::Macro::MacroRules(macro_rules) => {
- let arg = macro_rules
- .token_tree()
- .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
- let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
- let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021)?;
- (mac, def_site_token_map)
- }
- ast::Macro::MacroDef(macro_def) => {
- let arg = macro_def
- .body()
- .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
- let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
- let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021)?;
- (mac, def_site_token_map)
- }
- };
- Ok(Arc::new(TokenExpander::DeclarativeMacro { mac, def_site_token_map }))
- }
- MacroDefKind::BuiltIn(expander, _) => Ok(Arc::new(TokenExpander::Builtin(expander))),
- MacroDefKind::BuiltInAttr(expander, _) => {
- Ok(Arc::new(TokenExpander::BuiltinAttr(expander)))
+ TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
}
- MacroDefKind::BuiltInDerive(expander, _) => {
- Ok(Arc::new(TokenExpander::BuiltinDerive(expander)))
- }
- MacroDefKind::BuiltInEager(expander, ..) => {
- Ok(Arc::new(TokenExpander::BuiltinEager(expander)))
- }
- MacroDefKind::ProcMacro(expander, ..) => Ok(Arc::new(TokenExpander::ProcMacro(expander))),
+ MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
+ MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
+ MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
+ MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
+ MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
}
}
fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let _p = profile::span("macro_expand");
let loc = db.lookup_intern_macro_call(id);
- if let Some(EagerCallInfo { arg, arg_id: None, error }) = loc.eager.as_deref() {
- // This is an input expansion for an eager macro. These are already pre-expanded
- return ExpandResult { value: Arc::new(arg.0.clone()), err: error.clone() };
- }
- let expander = match db.macro_def(loc.def) {
- Ok(it) => it,
- // FIXME: We should make sure to enforce a variant that invalid macro
- // definitions do not get expanders that could reach this call path!
- Err(err) => {
- return ExpandResult {
- value: Arc::new(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
- token_trees: vec![],
- }),
- err: Some(ExpandError::other(format!("invalid macro definition: {err}"))),
- }
+
+ let ExpandResult { value: tt, mut err } = match loc.def.kind {
+ MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
+ MacroDefKind::BuiltInDerive(expander, ..) => {
+ let arg = db.macro_arg_node(id).value.unwrap();
+
+ let node = SyntaxNode::new_root(arg);
+ let censor = censor_for_macro_input(&loc, &node);
+ let mut fixups = fixup::fixup_syntax(&node);
+ fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
+ let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
+ &node,
+ fixups.token_map,
+ fixups.next_id,
+ fixups.replace,
+ fixups.append,
+ );
+
+ // this cast is a bit sus, can we avoid losing the typedness here?
+ let adt = ast::Adt::cast(node).unwrap();
+ let mut res = expander.expand(db, id, &adt, &tmap);
+ fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
+ res
+ }
+ _ => {
+ let ValueResult { value, err } = db.macro_arg(id);
+ let Some(macro_arg) = value else {
+ return ExpandResult {
+ value: Arc::new(tt::Subtree {
+ delimiter: tt::Delimiter::UNSPECIFIED,
+ token_trees: Vec::new(),
+ }),
+ // FIXME: We should make sure to enforce an invariant that invalid macro
+ // calls do not reach this call path!
+ err: Some(ExpandError::other("invalid token tree")),
+ };
+ };
+
+ let (arg, arg_tm, undo_info) = &*macro_arg;
+ let mut res = match loc.def.kind {
+ MacroDefKind::Declarative(id) => {
+ db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
+ }
+ MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
+ // This might look a bit odd, but we do not expand the inputs to eager macros here.
+ // Eager macro inputs are expanded, well, eagerly when we collect the macro calls.
+ // That kind of expansion uses the ast id map of an eager macro's input, which goes through
+ // the HirFileId machinery. As eager macro inputs are assigned a macro file id, that query
+ // will end up going through here again, whereas we just want to inspect the raw input.
+ // As such we just return the input subtree here.
+ MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
+ let mut arg = arg.clone();
+ fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
+
+ return ExpandResult {
+ value: Arc::new(arg),
+ err: err.map(|err| {
+ let mut buf = String::new();
+ for err in &**err {
+ use std::fmt::Write;
+ _ = write!(buf, "{}, ", err);
+ }
+ buf.pop();
+ buf.pop();
+ ExpandError::other(buf)
+ }),
+ };
+ }
+ MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
+ MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
+ _ => unreachable!(),
+ };
+ fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
+ res
}
};
- let Some(macro_arg) = db.macro_arg(id) else {
- return ExpandResult {
- value: Arc::new(
- tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
- token_trees: Vec::new(),
- },
- ),
- // FIXME: We should make sure to enforce a variant that invalid macro
- // calls do not reach this call path!
- err: Some(ExpandError::other(
- "invalid token tree"
- )),
- };
- };
- let (arg_tt, arg_tm, undo_info) = &*macro_arg;
- let ExpandResult { value: mut tt, mut err } = expander.expand(db, id, arg_tt);
if let Some(EagerCallInfo { error, .. }) = loc.eager.as_deref() {
// FIXME: We should report both errors!
@@ -483,48 +615,29 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
}
// Set a hard limit for the expanded tt
- let count = tt.count();
- if TOKEN_LIMIT.check(count).is_err() {
- return ExpandResult {
- value: Arc::new(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
- token_trees: vec![],
- }),
- err: Some(ExpandError::other(format!(
- "macro invocation exceeds token limit: produced {} tokens, limit is {}",
- count,
- TOKEN_LIMIT.inner(),
- ))),
- };
+ if let Err(value) = check_tt_count(&tt) {
+ return value;
}
- fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
-
ExpandResult { value: Arc::new(tt), err }
}
-fn parse_macro_expansion_error(
- db: &dyn ExpandDatabase,
- macro_call_id: MacroCallId,
-) -> ExpandResult<Box<[SyntaxError]>> {
- db.parse_macro_expansion(MacroFile { macro_call_id })
- .map(|it| it.0.errors().to_vec().into_boxed_slice())
-}
-
-fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
+fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let loc = db.lookup_intern_macro_call(id);
- let Some(macro_arg) = db.macro_arg(id) else {
+ let Some(macro_arg) = db.macro_arg(id).value else {
return ExpandResult {
- value: tt::Subtree {
+ value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
token_trees: Vec::new(),
- },
- err: Some(ExpandError::other(
- "invalid token tree"
- )),
+ }),
+ // FIXME: We should make sure to enforce an invariant that invalid macro
+ // calls do not reach this call path!
+ err: Some(ExpandError::other("invalid token tree")),
};
};
+ let (arg_tt, arg_tm, undo_info) = &*macro_arg;
+
let expander = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander,
_ => unreachable!(),
@@ -533,13 +646,23 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<t
let attr_arg = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => {
let mut attr_args = attr_args.0.clone();
- mbe::Shift::new(&macro_arg.0).shift_all(&mut attr_args);
+ mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
Some(attr_args)
}
_ => None,
};
- expander.expand(db, loc.def.krate, loc.krate, &macro_arg.0, attr_arg.as_ref())
+ let ExpandResult { value: mut tt, err } =
+ expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
+
+ // Set a hard limit for the expanded tt
+ if let Err(value) = check_tt_count(&tt) {
+ return value;
+ }
+
+ fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
+
+ ExpandResult { value: Arc::new(tt), err }
}
fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
@@ -563,3 +686,22 @@ fn token_tree_to_syntax_node(
};
mbe::token_tree_to_syntax_node(tt, entry_point)
}
+
+fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {
+ let count = tt.count();
+ if TOKEN_LIMIT.check(count).is_err() {
+ Err(ExpandResult {
+ value: Arc::new(tt::Subtree {
+ delimiter: tt::Delimiter::UNSPECIFIED,
+ token_trees: vec![],
+ }),
+ err: Some(ExpandError::other(format!(
+ "macro invocation exceeds token limit: produced {} tokens, limit is {}",
+ count,
+ TOKEN_LIMIT.inner(),
+ ))),
+ })
+ } else {
+ Ok(())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
index 7ee3fd375..4110f2847 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
@@ -19,7 +19,8 @@
//!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::CrateId;
-use syntax::{ted, Parse, SyntaxNode};
+use rustc_hash::{FxHashMap, FxHashSet};
+use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
use triomphe::Arc;
use crate::{
@@ -28,7 +29,7 @@ use crate::{
hygiene::Hygiene,
mod_path::ModPath,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
- MacroCallLoc, MacroDefId, MacroDefKind, UnresolvedMacro,
+ MacroCallLoc, MacroDefId, MacroDefKind,
};
pub fn expand_eager_macro_input(
@@ -37,20 +38,9 @@ pub fn expand_eager_macro_input(
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
-) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
- assert!(matches!(def.kind, MacroDefKind::BuiltInEager(..)));
- let token_tree = macro_call.value.token_tree();
-
- let Some(token_tree) = token_tree else {
- return Ok(ExpandResult { value: None, err:
- Some(ExpandError::other(
- "invalid token tree"
- )),
- });
- };
- let (parsed_args, arg_token_map) = mbe::syntax_node_to_token_tree(token_tree.syntax());
-
+) -> ExpandResult<Option<MacroCallId>> {
let ast_map = db.ast_id_map(macro_call.file_id);
+ // the expansion that the ast id map is built upon has no whitespace, so the offsets are wrong, as macro_call comes from the token tree that does have whitespace!
let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
let expand_to = ExpandTo::from_call_site(&macro_call.value);
@@ -61,47 +51,80 @@ pub fn expand_eager_macro_input(
let arg_id = db.intern_macro_call(MacroCallLoc {
def,
krate,
- eager: Some(Box::new(EagerCallInfo {
- arg: Arc::new((parsed_args, arg_token_map)),
- arg_id: None,
- error: None,
- })),
+ eager: None,
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
});
- let arg_as_expr = match db.macro_arg_text(arg_id) {
- Some(it) => it,
- None => {
- return Ok(ExpandResult {
- value: None,
- err: Some(ExpandError::other("invalid token tree")),
- })
- }
+ let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
+ db.parse_macro_expansion(arg_id.as_macro_file());
+ // we need this map here as the expansion of the eager input fake file loses whitespace ...
+ let mut ws_mapping = FxHashMap::default();
+ if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
+ ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
+ Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
+ }));
+ }
+
+ let ExpandResult { value: expanded_eager_input, err } = {
+ eager_macro_recur(
+ db,
+ &Hygiene::new(db, macro_call.file_id),
+ InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
+ krate,
+ resolver,
+ )
};
- let ExpandResult { value: expanded_eager_input, err } = eager_macro_recur(
- db,
- &Hygiene::new(db, macro_call.file_id),
- InFile::new(arg_id.as_file(), SyntaxNode::new_root(arg_as_expr)),
- krate,
- resolver,
- )?;
- let Some(expanded_eager_input) = expanded_eager_input else {
- return Ok(ExpandResult { value: None, err })
+ let err = parse_err.or(err);
+
+ let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
+ return ExpandResult { value: None, err };
+ };
+
+ let (mut subtree, expanded_eager_input_token_map) =
+ mbe::syntax_node_to_token_tree(&expanded_eager_input);
+
+ let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
+ let mut ids_used = FxHashSet::default();
+ let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
+ // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
+ // so we need to remap them to the original input of the eager macro.
+ subtree.visit_ids(&mut |id| {
+ // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
+
+ if let Some(range) = expanded_eager_input_token_map
+ .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
+ {
+ // remap from expanded eager input to eager input expansion
+ if let Some(og_range) = mapping.get(&range) {
+ // remap from eager input expansion to original eager input
+ if let Some(&og_range) = ws_mapping.get(og_range) {
+ if let Some(og_token) = og_tmap.token_by_range(og_range) {
+ ids_used.insert(og_token);
+ return og_token;
+ }
+ }
+ }
+ }
+ tt::TokenId::UNSPECIFIED
+ });
+ og_tmap.filter(|id| ids_used.contains(&id));
+ og_tmap
+ } else {
+ Default::default()
};
- let (mut subtree, token_map) = mbe::syntax_node_to_token_tree(&expanded_eager_input);
subtree.delimiter = crate::tt::Delimiter::unspecified();
let loc = MacroCallLoc {
def,
krate,
eager: Some(Box::new(EagerCallInfo {
- arg: Arc::new((subtree, token_map)),
- arg_id: Some(arg_id),
+ arg: Arc::new((subtree, og_tmap)),
+ arg_id,
error: err.clone(),
})),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
};
- Ok(ExpandResult { value: Some(db.intern_macro_call(loc)), err })
+ ExpandResult { value: Some(db.intern_macro_call(loc)), err }
}
fn lazy_expand(
@@ -109,19 +132,16 @@ fn lazy_expand(
def: &MacroDefId,
macro_call: InFile<ast::MacroCall>,
krate: CrateId,
-) -> ExpandResult<InFile<Parse<SyntaxNode>>> {
+) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
let expand_to = ExpandTo::from_call_site(&macro_call.value);
- let id = def.as_lazy_macro(
- db,
- krate,
- MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to },
- );
-
+ let ast_id = macro_call.with_value(ast_id);
+ let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
let macro_file = id.as_macro_file();
- db.parse_macro_expansion(macro_file).map(|parse| InFile::new(macro_file.into(), parse.0))
+ db.parse_macro_expansion(macro_file)
+ .map(|parse| (InFile::new(macro_file.into(), parse.0), parse.1))
}
fn eager_macro_recur(
@@ -130,19 +150,51 @@ fn eager_macro_recur(
curr: InFile<SyntaxNode>,
krate: CrateId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
-) -> Result<ExpandResult<Option<SyntaxNode>>, UnresolvedMacro> {
+) -> ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>> {
let original = curr.value.clone_for_update();
+ let mut mapping = FxHashMap::default();
- let children = original.descendants().filter_map(ast::MacroCall::cast);
let mut replacements = Vec::new();
- // Note: We only report a single error inside of eager expansions
+ // FIXME: We only report a single error inside of eager expansions
let mut error = None;
+ let mut offset = 0i32;
+ let apply_offset = |it: TextSize, offset: i32| {
+ TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
+ };
+ let mut children = original.preorder_with_tokens();
// Collect replacement
- for child in children {
- let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
- Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?,
+ while let Some(child) = children.next() {
+ let WalkEvent::Enter(child) = child else { continue };
+ let call = match child {
+ syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) {
+ Some(it) => {
+ children.skip_subtree();
+ it
+ }
+ None => continue,
+ },
+ syntax::NodeOrToken::Token(t) => {
+ mapping.insert(
+ TextRange::new(
+ apply_offset(t.text_range().start(), offset),
+ apply_offset(t.text_range().end(), offset),
+ ),
+ t.text_range(),
+ );
+ continue;
+ }
+ };
+ let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
+ Some(path) => match macro_resolver(path.clone()) {
+ Some(def) => def,
+ None => {
+ error =
+ Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
+ continue;
+ }
+ },
None => {
error = Some(ExpandError::other("malformed macro invocation"));
continue;
@@ -150,20 +202,32 @@ fn eager_macro_recur(
};
let ExpandResult { value, err } = match def.kind {
MacroDefKind::BuiltInEager(..) => {
- let ExpandResult { value, err } = match expand_eager_macro_input(
+ let ExpandResult { value, err } = expand_eager_macro_input(
db,
krate,
- curr.with_value(child.clone()),
+ curr.with_value(call.clone()),
def,
macro_resolver,
- ) {
- Ok(it) => it,
- Err(err) => return Err(err),
- };
+ );
match value {
- Some(call) => {
+ Some(call_id) => {
let ExpandResult { value, err: err2 } =
- db.parse_macro_expansion(call.as_macro_file());
+ db.parse_macro_expansion(call_id.as_macro_file());
+
+ if let Some(tt) = call.token_tree() {
+ let call_tt_start = tt.syntax().text_range().start();
+ let call_start =
+ apply_offset(call.syntax().text_range().start(), offset);
+ if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
+ mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+ value
+ .1
+ .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
+ .map(|r| (r + call_start, range + call_tt_start))
+ }));
+ }
+ }
+
ExpandResult {
value: Some(value.0.syntax_node().clone_for_update()),
err: err.or(err2),
@@ -177,36 +241,63 @@ fn eager_macro_recur(
| MacroDefKind::BuiltInAttr(..)
| MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => {
- let ExpandResult { value, err } =
- lazy_expand(db, &def, curr.with_value(child.clone()), krate);
+ let ExpandResult { value: (parse, tm), err } =
+ lazy_expand(db, &def, curr.with_value(call.clone()), krate);
+ let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
+ Some(db.decl_macro_expander(def.krate, ast_id))
+ } else {
+ None
+ };
// replace macro inside
- let hygiene = Hygiene::new(db, value.file_id);
+ let hygiene = Hygiene::new(db, parse.file_id);
let ExpandResult { value, err: error } = eager_macro_recur(
db,
&hygiene,
// FIXME: We discard parse errors here
- value.map(|it| it.syntax_node()),
+ parse.as_ref().map(|it| it.syntax_node()),
krate,
macro_resolver,
- )?;
+ );
let err = err.or(error);
- ExpandResult { value, err }
+
+ if let Some(tt) = call.token_tree() {
+ let call_tt_start = tt.syntax().text_range().start();
+ let call_start = apply_offset(call.syntax().text_range().start(), offset);
+ if let Some((_tt, arg_map, _)) = parse
+ .file_id
+ .macro_file()
+ .and_then(|id| db.macro_arg(id.macro_call_id).value)
+ .as_deref()
+ {
+ mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+ tm.first_range_by_token(
+ decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
+ syntax::SyntaxKind::TOMBSTONE,
+ )
+ .map(|r| (r + call_start, range + call_tt_start))
+ }));
+ }
+ }
+ // FIXME: Do we need to re-use _m here?
+ ExpandResult { value: value.map(|(n, _m)| n), err }
}
};
if err.is_some() {
error = err;
}
// check if the whole original syntax is replaced
- if child.syntax() == &original {
- return Ok(ExpandResult { value, err: error });
+ if call.syntax() == &original {
+ return ExpandResult { value: value.zip(Some(mapping)), err: error };
}
if let Some(insert) = value {
- replacements.push((child, insert));
+ offset += u32::from(insert.text_range().len()) as i32
+ - u32::from(call.syntax().text_range().len()) as i32;
+ replacements.push((call, insert));
}
}
replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
- Ok(ExpandResult { value: Some(original), err: error })
+ ExpandResult { value: Some((original, mapping)), err: error }
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
index 00796e7c0..e6e8d8c02 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -26,7 +26,7 @@ pub(crate) struct SyntaxFixups {
/// This is the information needed to reverse the fixups.
#[derive(Debug, Default, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo {
- original: Vec<Subtree>,
+ original: Box<[Subtree]>,
}
const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
@@ -272,7 +272,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
replace,
token_map,
next_id,
- undo_info: SyntaxFixupUndoInfo { original },
+ undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
}
}
@@ -472,13 +472,13 @@ fn foo () {match __ra_fixup {}}
check(
r#"
fn foo() {
- match x {
+ match it {
}
}
"#,
expect![[r#"
-fn foo () {match x {}}
+fn foo () {match it {}}
"#]],
)
}
@@ -547,11 +547,11 @@ fn foo () {a . __ra_fixup ; bar () ;}
check(
r#"
fn foo() {
- let x = a
+ let it = a
}
"#,
expect![[r#"
-fn foo () {let x = a ;}
+fn foo () {let it = a ;}
"#]],
)
}
@@ -561,11 +561,11 @@ fn foo () {let x = a ;}
check(
r#"
fn foo() {
- let x = a.
+ let it = a.
}
"#,
expect![[r#"
-fn foo () {let x = a . __ra_fixup ;}
+fn foo () {let it = a . __ra_fixup ;}
"#]],
)
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
index 10f8fe9ce..ade4a5928 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
@@ -126,7 +126,7 @@ struct HygieneInfo {
/// The start offset of the `macro_rules!` arguments or attribute input.
attr_input_or_mac_def_start: Option<InFile<TextSize>>,
- macro_def: Arc<TokenExpander>,
+ macro_def: TokenExpander,
macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
macro_arg_shift: mbe::Shift,
exp_map: Arc<mbe::TokenMap>,
@@ -149,19 +149,15 @@ impl HygieneInfo {
token_id = unshifted;
(&attr_args.1, self.attr_input_or_mac_def_start?)
}
- None => (
- &self.macro_arg.1,
- InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
- ),
+ None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
},
_ => match origin {
- mbe::Origin::Call => (
- &self.macro_arg.1,
- InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
- ),
- mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def_start) {
- (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
- (def_site_token_map, *tt)
+ mbe::Origin::Call => {
+ (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
+ }
+ mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
+ (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
+ (&expander.def_site_token_map, *tt)
}
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
},
@@ -177,7 +173,7 @@ fn make_hygiene_info(
db: &dyn ExpandDatabase,
macro_file: MacroFile,
loc: &MacroCallLoc,
-) -> Option<HygieneInfo> {
+) -> HygieneInfo {
let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) {
ast::Macro::MacroRules(mac) => mac.token_tree()?,
@@ -198,9 +194,9 @@ fn make_hygiene_info(
_ => None,
});
- let macro_def = db.macro_def(loc.def).ok()?;
+ let macro_def = db.macro_expander(loc.def);
let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
- let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
+ let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
Arc::new((
tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
Default::default(),
@@ -208,7 +204,7 @@ fn make_hygiene_info(
))
});
- Some(HygieneInfo {
+ HygieneInfo {
file: macro_file,
attr_input_or_mac_def_start: attr_input_or_mac_def
.map(|it| it.map(|tt| tt.syntax().text_range().start())),
@@ -216,7 +212,7 @@ fn make_hygiene_info(
macro_arg,
macro_def,
exp_map,
- })
+ }
}
impl HygieneFrame {
@@ -225,8 +221,7 @@ impl HygieneFrame {
None => (None, None, false),
Some(macro_file) => {
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- let info =
- make_hygiene_info(db, macro_file, &loc).map(|info| (loc.kind.file_id(), info));
+ let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
match loc.def.kind {
MacroDefKind::Declarative(_) => {
(info, Some(loc.def.krate), loc.def.local_inner)
@@ -240,17 +235,14 @@ impl HygieneFrame {
}
};
- let (calling_file, info) = match info {
- None => {
- return HygieneFrame {
- expansion: None,
- local_inner,
- krate,
- call_site: None,
- def_site: None,
- };
+ let Some((info, calling_file)) = info else {
+ return HygieneFrame {
+ expansion: None,
+ local_inner,
+ krate,
+ call_site: None,
+ def_site: None,
}
- Some(it) => it,
};
let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index e0c199328..1f1e20f49 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -37,11 +37,11 @@ use either::Either;
use syntax::{
algo::{self, skip_trivia_token},
ast::{self, AstNode, HasDocComments},
- Direction, SyntaxNode, SyntaxToken,
+ AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken,
};
use crate::{
- ast_id_map::FileAstId,
+ ast_id_map::{AstIdNode, ErasedFileAstId, FileAstId},
attrs::AttrId,
builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander,
@@ -127,7 +127,8 @@ impl_intern_key!(MacroCallId);
pub struct MacroCallLoc {
pub def: MacroDefId,
pub(crate) krate: CrateId,
- /// Some if `def` is a builtin eager macro.
+ /// Some if this is a macro call for an eager macro. Note that this is `None`
+ /// for the eager input macro file.
eager: Option<Box<EagerCallInfo>>,
pub kind: MacroCallKind,
}
@@ -152,11 +153,10 @@ pub enum MacroDefKind {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct EagerCallInfo {
- /// NOTE: This can be *either* the expansion result, *or* the argument to the eager macro!
+ /// The expanded argument of the eager macro.
arg: Arc<(tt::Subtree, TokenMap)>,
- /// call id of the eager macro's input file. If this is none, macro call containing this call info
- /// is an eager macro's input, otherwise it is its output.
- arg_id: Option<MacroCallId>,
+ /// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
+ arg_id: MacroCallId,
error: Option<ExpandError>,
}
@@ -221,11 +221,7 @@ impl HirFileId {
HirFileIdRepr::FileId(id) => break id,
HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
- let is_include_expansion = loc.def.is_include()
- && matches!(
- loc.eager.as_deref(),
- Some(EagerCallInfo { arg_id: Some(_), .. })
- );
+ let is_include_expansion = loc.def.is_include() && loc.eager.is_some();
file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) {
Some(Ok((_, file))) => file.into(),
_ => loc.kind.file_id(),
@@ -270,57 +266,13 @@ impl HirFileId {
/// Return expansion information if it is a macro-expansion file
pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
let macro_file = self.macro_file()?;
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-
- let arg_tt = loc.kind.arg(db)?;
-
- let macro_def = db.macro_def(loc.def).ok()?;
- let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
- let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
- Arc::new((
- tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
- Default::default(),
- Default::default(),
- ))
- });
-
- let def = loc.def.ast_id().left().and_then(|id| {
- let def_tt = match id.to_node(db) {
- ast::Macro::MacroRules(mac) => mac.token_tree()?,
- ast::Macro::MacroDef(_) if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) => {
- return None
- }
- ast::Macro::MacroDef(mac) => mac.body()?,
- };
- Some(InFile::new(id.file_id, def_tt))
- });
- let attr_input_or_mac_def = def.or_else(|| match loc.kind {
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
- // FIXME: handle `cfg_attr`
- let tt = ast_id
- .to_node(db)
- .doc_comments_and_attrs()
- .nth(invoc_attr_index.ast_index())
- .and_then(Either::left)?
- .token_tree()?;
- Some(InFile::new(ast_id.file_id, tt))
- }
- _ => None,
- });
-
- Some(ExpansionInfo {
- expanded: InFile::new(self, parse.syntax_node()),
- arg: InFile::new(loc.kind.file_id(), arg_tt),
- attr_input_or_mac_def,
- macro_arg_shift: mbe::Shift::new(&macro_arg.0),
- macro_arg,
- macro_def,
- exp_map,
- })
+ ExpansionInfo::new(db, macro_file)
}
- /// Indicate it is macro file generated for builtin derive
- pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<ast::Attr>> {
+ pub fn as_builtin_derive_attr_node(
+ &self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let attr = match loc.def.kind {
@@ -333,8 +285,22 @@ impl HirFileId {
pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- matches!(loc.def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _))
+ matches!(
+ db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
+ MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
+ )
+ }
+ None => false,
+ }
+ }
+
+ pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
+ match self.macro_file() {
+ Some(macro_file) => {
+ matches!(
+ db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
+ MacroDefKind::BuiltInDerive(..)
+ )
}
None => false,
}
@@ -344,8 +310,7 @@ impl HirFileId {
pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- loc.def.is_include()
+ db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include()
}
_ => false,
}
@@ -355,7 +320,7 @@ impl HirFileId {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- matches!(loc.eager.as_deref(), Some(EagerCallInfo { .. }))
+ matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
}
_ => false,
}
@@ -450,6 +415,24 @@ impl MacroDefId {
)
}
+ pub fn is_derive(&self) -> bool {
+ matches!(
+ self.kind,
+ MacroDefKind::BuiltInDerive(..)
+ | MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
+ )
+ }
+
+ pub fn is_fn_like(&self) -> bool {
+ matches!(
+ self.kind,
+ MacroDefKind::BuiltIn(..)
+ | MacroDefKind::ProcMacro(_, ProcMacroKind::FuncLike, _)
+ | MacroDefKind::BuiltInEager(..)
+ | MacroDefKind::Declarative(..)
+ )
+ }
+
pub fn is_attribute_derive(&self) -> bool {
matches!(self.kind, MacroDefKind::BuiltInAttr(expander, ..) if expander.is_derive())
}
@@ -536,9 +519,9 @@ impl MacroCallKind {
};
let range = match kind {
- MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
- MacroCallKind::Derive { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
- MacroCallKind::Attr { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ MacroCallKind::FnLike { ast_id, .. } => ast_id.to_ptr(db).text_range(),
+ MacroCallKind::Derive { ast_id, .. } => ast_id.to_ptr(db).text_range(),
+ MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).text_range(),
};
FileRange { range, file_id }
@@ -588,13 +571,18 @@ impl MacroCallKind {
FileRange { range, file_id }
}
- fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<SyntaxNode> {
+ fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
match self {
- MacroCallKind::FnLike { ast_id, .. } => {
- Some(ast_id.to_node(db).token_tree()?.syntax().clone())
+ MacroCallKind::FnLike { ast_id, .. } => ast_id
+ .to_in_file_node(db)
+ .map(|it| Some(it.token_tree()?.syntax().clone()))
+ .transpose(),
+ MacroCallKind::Derive { ast_id, .. } => {
+ Some(ast_id.to_in_file_node(db).syntax().cloned())
+ }
+ MacroCallKind::Attr { ast_id, .. } => {
+ Some(ast_id.to_in_file_node(db).syntax().cloned())
}
- MacroCallKind::Derive { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
- MacroCallKind::Attr { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
}
}
}
@@ -612,13 +600,13 @@ impl MacroCallId {
/// ExpansionInfo mainly describes how to map text range between src and expanded macro
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ExpansionInfo {
- expanded: InFile<SyntaxNode>,
+ expanded: InMacroFile<SyntaxNode>,
/// The argument TokenTree or item for attributes
arg: InFile<SyntaxNode>,
/// The `macro_rules!` or attribute input.
attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
- macro_def: Arc<TokenExpander>,
+ macro_def: TokenExpander,
macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
/// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
/// and as such we need to shift tokens if they are part of an attributes input instead of their item.
@@ -628,7 +616,7 @@ pub struct ExpansionInfo {
impl ExpansionInfo {
pub fn expanded(&self) -> InFile<SyntaxNode> {
- self.expanded.clone()
+ self.expanded.clone().into()
}
pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
@@ -659,7 +647,7 @@ impl ExpansionInfo {
let token_id_in_attr_input = if let Some(item) = item {
// check if we are mapping down in an attribute input
// this is a special case as attributes can have two inputs
- let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+ let call_id = self.expanded.file_id.macro_call_id;
let loc = db.lookup_intern_macro_call(call_id);
let token_range = token.value.text_range();
@@ -705,7 +693,7 @@ impl ExpansionInfo {
let relative_range =
token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
let token_id = self.macro_arg.1.token_by_range(relative_range)?;
- // conditionally shift the id by a declaratives macro definition
+ // conditionally shift the id by a declarative macro definition
self.macro_def.map_id_down(token_id)
}
};
@@ -715,7 +703,7 @@ impl ExpansionInfo {
.ranges_by_token(token_id, token.value.kind())
.flat_map(move |range| self.expanded.value.covering_element(range).into_token());
- Some(tokens.map(move |token| self.expanded.with_value(token)))
+ Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token)))
}
/// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion.
@@ -724,18 +712,17 @@ impl ExpansionInfo {
db: &dyn db::ExpandDatabase,
token: InFile<&SyntaxToken>,
) -> Option<(InFile<SyntaxToken>, Origin)> {
+ assert_eq!(token.file_id, self.expanded.file_id.into());
// Fetch the id through its text range,
let token_id = self.exp_map.token_by_range(token.value.text_range())?;
// conditionally unshifting the id to accommodate for macro-rules def site
let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
- let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+ let call_id = self.expanded.file_id.macro_call_id;
let loc = db.lookup_intern_macro_call(call_id);
// Special case: map tokens from `include!` expansions to the included file
- if loc.def.is_include()
- && matches!(loc.eager.as_deref(), Some(EagerCallInfo { arg_id: Some(_), .. }))
- {
+ if loc.def.is_include() {
if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) {
let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?;
let source = db.parse(file_id);
@@ -765,9 +752,9 @@ impl ExpansionInfo {
}
_ => match origin {
mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
- mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def) {
- (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
- (def_site_token_map, tt.syntax().cloned())
+ mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) {
+ (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
+ (&expander.def_site_token_map, tt.syntax().cloned())
}
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
},
@@ -779,6 +766,58 @@ impl ExpansionInfo {
tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
Some((tt.with_value(token), origin))
}
+
+ fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option<ExpansionInfo> {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+
+ let arg_tt = loc.kind.arg(db)?;
+
+ let macro_def = db.macro_expander(loc.def);
+ let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
+ let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
+
+ let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
+ Arc::new((
+ tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
+ Default::default(),
+ Default::default(),
+ ))
+ });
+
+ let def = loc.def.ast_id().left().and_then(|id| {
+ let def_tt = match id.to_node(db) {
+ ast::Macro::MacroRules(mac) => mac.token_tree()?,
+ ast::Macro::MacroDef(_) if matches!(macro_def, TokenExpander::BuiltInAttr(_)) => {
+ return None
+ }
+ ast::Macro::MacroDef(mac) => mac.body()?,
+ };
+ Some(InFile::new(id.file_id, def_tt))
+ });
+ let attr_input_or_mac_def = def.or_else(|| match loc.kind {
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ // FIXME: handle `cfg_attr`
+ let tt = ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(invoc_attr_index.ast_index())
+ .and_then(Either::left)?
+ .token_tree()?;
+ Some(InFile::new(ast_id.file_id, tt))
+ }
+ _ => None,
+ });
+
+ Some(ExpansionInfo {
+ expanded,
+ arg: arg_tt,
+ attr_input_or_mac_def,
+ macro_arg_shift: mbe::Shift::new(&macro_arg.0),
+ macro_arg,
+ macro_def,
+ exp_map,
+ })
+ }
}
/// `AstId` points to an AST node in any file.
@@ -786,10 +825,26 @@ impl ExpansionInfo {
/// It is stable across reparses, and can be used as salsa key/value.
pub type AstId<N> = InFile<FileAstId<N>>;
-impl<N: AstNode> AstId<N> {
+impl<N: AstIdNode> AstId<N> {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
- let root = db.parse_or_expand(self.file_id);
- db.ast_id_map(self.file_id).get(self.value).to_node(&root)
+ self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
+ }
+ pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> InFile<N> {
+ InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
+ }
+ pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
+ db.ast_id_map(self.file_id).get(self.value)
+ }
+}
+
+pub type ErasedAstId = InFile<ErasedFileAstId>;
+
+impl ErasedAstId {
+ pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
+ }
+ pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
+ db.ast_id_map(self.file_id).get_raw(self.value)
}
}
@@ -850,7 +905,7 @@ impl<L, R> InFile<Either<L, R>> {
}
}
-impl<'a> InFile<&'a SyntaxNode> {
+impl InFile<&SyntaxNode> {
pub fn ancestors_with_macros(
self,
db: &dyn db::ExpandDatabase,
@@ -1011,6 +1066,18 @@ impl InFile<SyntaxToken> {
}
}
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct InMacroFile<T> {
+ pub file_id: MacroFile,
+ pub value: T,
+}
+
+impl<T> From<InMacroFile<T>> for InFile<T> {
+ fn from(macro_file: InMacroFile<T>) -> Self {
+ InFile { file_id: macro_file.file_id.into(), value: macro_file.value }
+ }
+}
+
fn ascend_node_border_tokens(
db: &dyn db::ExpandDatabase,
InFile { file_id, value: node }: InFile<&SyntaxNode>,
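For orientation, the `InMacroFile` wrapper introduced above pairs a value with a file id that is statically known to be a macro expansion and widens into the general `InFile` via `From`. The following stand-alone sketch mirrors that shape with simplified stand-in types; `FileId` and `MacroFile` here are illustrative only, not rust-analyzer's real definitions.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct FileId(u32);            // stand-in for the general `HirFileId`
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct MacroFile(u32);         // stand-in for a macro-expansion file id

impl From<MacroFile> for FileId {
    fn from(m: MacroFile) -> FileId { FileId(m.0) }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct InFile<T> { file_id: FileId, value: T }
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct InMacroFile<T> { file_id: MacroFile, value: T }

impl<T> From<InMacroFile<T>> for InFile<T> {
    fn from(m: InMacroFile<T>) -> Self {
        InFile { file_id: m.file_id.into(), value: m.value }
    }
}

fn main() {
    // Code that only ever deals with expansions carries the stronger type and
    // widens it at the boundary, as `ExpansionInfo::expanded()` does above.
    let expanded = InMacroFile { file_id: MacroFile(7), value: "expanded node" };
    let general: InFile<&str> = expanded.into();
    assert_eq!(general.file_id, FileId(7));
}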
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
index 47a8ab7de..69aa09c4a 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
@@ -126,7 +126,7 @@ struct Display<'a> {
path: &'a ModPath,
}
-impl<'a> fmt::Display for Display<'a> {
+impl fmt::Display for Display<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
display_fmt_path(self.db, self.path, f, true)
}
@@ -137,7 +137,7 @@ struct UnescapedDisplay<'a> {
path: &'a UnescapedModPath<'a>,
}
-impl<'a> fmt::Display for UnescapedDisplay<'a> {
+impl fmt::Display for UnescapedDisplay<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
display_fmt_path(self.db, self.path.0, f, false)
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
index f8dbb8427..7c179c0cf 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
@@ -24,7 +24,7 @@ enum Repr {
TupleField(usize),
}
-impl<'a> UnescapedName<'a> {
+impl UnescapedName<'_> {
/// Returns the textual representation of this name as a [`SmolStr`]. Prefer using this over
/// [`ToString::to_string`] if possible as this conversion is cheaper in the general case.
pub fn to_smol_str(&self) -> SmolStr {
@@ -40,7 +40,7 @@ impl<'a> UnescapedName<'a> {
}
}
- pub fn display(&'a self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
+ pub fn display(&self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + '_ {
_ = db;
UnescapedDisplay { name: self }
}
@@ -96,6 +96,15 @@ impl Name {
Name::new_inline("[missing name]")
}
+ /// Returns true if this is a fake name for things missing in the source code. See
+ /// [`missing()`][Self::missing] for details.
+ ///
+ /// Use this method instead of comparing with `Self::missing()`, as missing names
+ /// should (ideally) have `gensym` semantics.
+ pub fn is_missing(&self) -> bool {
+ self == &Name::missing()
+ }
+
/// Generates a new name which is only equal to itself, by incrementing a counter. Due
/// its implementation, it should not be used in things that salsa considers, like
/// type names or field names, and it should be only used in names of local variables
@@ -162,7 +171,7 @@ struct Display<'a> {
name: &'a Name,
}
-impl<'a> fmt::Display for Display<'a> {
+impl fmt::Display for Display<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self.name.0 {
Repr::Text(text) => fmt::Display::fmt(&text, f),
@@ -175,7 +184,7 @@ struct UnescapedDisplay<'a> {
name: &'a UnescapedName<'a>,
}
-impl<'a> fmt::Display for UnescapedDisplay<'a> {
+impl fmt::Display for UnescapedDisplay<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self.name.0 .0 {
Repr::Text(text) => {
@@ -282,8 +291,10 @@ pub mod known {
alloc,
iter,
ops,
+ fmt,
future,
result,
+ string,
boxed,
option,
prelude,
@@ -311,6 +322,7 @@ pub mod known {
RangeToInclusive,
RangeTo,
Range,
+ String,
Neg,
Not,
None,
@@ -321,6 +333,7 @@ pub mod known {
iter_mut,
len,
is_empty,
+ as_str,
new,
// Builtin macros
asm,
@@ -334,6 +347,7 @@ pub mod known {
core_panic,
env,
file,
+ format,
format_args_nl,
format_args,
global_asm,
@@ -365,6 +379,7 @@ pub mod known {
cfg_eval,
crate_type,
derive,
+ derive_const,
global_allocator,
no_core,
no_std,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
index c8bea3450..abc19d63a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
@@ -19,14 +19,15 @@ bitflags = "2.1.0"
smallvec.workspace = true
ena = "0.14.0"
either = "1.7.0"
+oorandom = "11.1.3"
tracing = "0.1.35"
rustc-hash = "1.1.0"
scoped-tls = "1.0.0"
-chalk-solve = { version = "0.91.0", default-features = false }
-chalk-ir = "0.91.0"
-chalk-recursive = { version = "0.91.0", default-features = false }
-chalk-derive = "0.91.0"
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+chalk-solve = { version = "0.92.0", default-features = false }
+chalk-ir = "0.92.0"
+chalk-recursive = { version = "0.92.0", default-features = false }
+chalk-derive = "0.92.0"
+la-arena.workspace = true
once_cell = "1.17.0"
triomphe.workspace = true
nohash-hasher.workspace = true
@@ -47,7 +48,6 @@ limit.workspace = true
expect-test = "1.4.0"
tracing = "0.1.35"
tracing-subscriber = { version = "0.3.16", default-features = false, features = [
- "env-filter",
"registry",
] }
tracing-tree = "0.2.1"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
index 3860bccec..4625a3b01 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
@@ -36,7 +36,7 @@ pub fn autoderef(
) -> impl Iterator<Item = Ty> {
let mut table = InferenceTable::new(db, env);
let ty = table.instantiate_canonical(ty);
- let mut autoderef = Autoderef::new(&mut table, ty);
+ let mut autoderef = Autoderef::new(&mut table, ty, false);
let mut v = Vec::new();
while let Some((ty, _steps)) = autoderef.next() {
// `ty` may contain unresolved inference variables. Since there's no chance they would be
@@ -63,12 +63,13 @@ pub(crate) struct Autoderef<'a, 'db> {
ty: Ty,
at_start: bool,
steps: Vec<(AutoderefKind, Ty)>,
+ explicit: bool,
}
impl<'a, 'db> Autoderef<'a, 'db> {
- pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty) -> Self {
+ pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty, explicit: bool) -> Self {
let ty = table.resolve_ty_shallow(&ty);
- Autoderef { table, ty, at_start: true, steps: Vec::new() }
+ Autoderef { table, ty, at_start: true, steps: Vec::new(), explicit }
}
pub(crate) fn step_count(&self) -> usize {
@@ -97,7 +98,7 @@ impl Iterator for Autoderef<'_, '_> {
return None;
}
- let (kind, new_ty) = autoderef_step(self.table, self.ty.clone())?;
+ let (kind, new_ty) = autoderef_step(self.table, self.ty.clone(), self.explicit)?;
self.steps.push((kind, self.ty.clone()));
self.ty = new_ty;
@@ -109,8 +110,9 @@ impl Iterator for Autoderef<'_, '_> {
pub(crate) fn autoderef_step(
table: &mut InferenceTable<'_>,
ty: Ty,
+ explicit: bool,
) -> Option<(AutoderefKind, Ty)> {
- if let Some(derefed) = builtin_deref(table, &ty, false) {
+ if let Some(derefed) = builtin_deref(table, &ty, explicit) {
Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed)))
} else {
Some((AutoderefKind::Overloaded, deref_by_trait(table, ty)?))
@@ -124,7 +126,6 @@ pub(crate) fn builtin_deref<'ty>(
) -> Option<&'ty Ty> {
match ty.kind(Interner) {
TyKind::Ref(.., ty) => Some(ty),
- // FIXME: Maybe accept this but diagnose if its not explicit?
TyKind::Raw(.., ty) if explicit => Some(ty),
&TyKind::Adt(chalk_ir::AdtId(adt), ref substs) => {
if crate::lang_items::is_box(table.db, adt) {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
index 5dd8e2719..f4fbace19 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
@@ -5,13 +5,13 @@ use std::{iter, sync::Arc};
use tracing::debug;
-use chalk_ir::{cast::Cast, fold::shift::Shift, CanonicalVarKinds};
+use chalk_ir::{cast::Caster, fold::shift::Shift, CanonicalVarKinds};
use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
use base_db::CrateId;
use hir_def::{
hir::Movability,
- lang_item::{lang_attr, LangItem, LangItemTarget},
+ lang_item::{LangItem, LangItemTarget},
AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId,
};
use hir_expand::name::name;
@@ -46,7 +46,7 @@ pub(crate) type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue<Inte
pub(crate) type FnDefDatum = chalk_solve::rust_ir::FnDefDatum<Interner>;
pub(crate) type Variances = chalk_ir::Variances<Interner>;
-impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
+impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
self.db.associated_ty_data(id)
}
@@ -60,9 +60,37 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
// FIXME: keep track of these
Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None })
}
- fn discriminant_type(&self, _ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> {
- // FIXME: keep track of this
- chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32)).intern(Interner)
+ fn discriminant_type(&self, ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> {
+ if let chalk_ir::TyKind::Adt(id, _) = ty.kind(Interner) {
+ if let hir_def::AdtId::EnumId(e) = id.0 {
+ let enum_data = self.db.enum_data(e);
+ let ty = enum_data.repr.unwrap_or_default().discr_type();
+ return chalk_ir::TyKind::Scalar(match ty {
+ hir_def::layout::IntegerType::Pointer(is_signed) => match is_signed {
+ true => chalk_ir::Scalar::Int(chalk_ir::IntTy::Isize),
+ false => chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize),
+ },
+ hir_def::layout::IntegerType::Fixed(size, is_signed) => match is_signed {
+ true => chalk_ir::Scalar::Int(match size {
+ hir_def::layout::Integer::I8 => chalk_ir::IntTy::I8,
+ hir_def::layout::Integer::I16 => chalk_ir::IntTy::I16,
+ hir_def::layout::Integer::I32 => chalk_ir::IntTy::I32,
+ hir_def::layout::Integer::I64 => chalk_ir::IntTy::I64,
+ hir_def::layout::Integer::I128 => chalk_ir::IntTy::I128,
+ }),
+ false => chalk_ir::Scalar::Uint(match size {
+ hir_def::layout::Integer::I8 => chalk_ir::UintTy::U8,
+ hir_def::layout::Integer::I16 => chalk_ir::UintTy::U16,
+ hir_def::layout::Integer::I32 => chalk_ir::UintTy::U32,
+ hir_def::layout::Integer::I64 => chalk_ir::UintTy::U64,
+ hir_def::layout::Integer::I128 => chalk_ir::UintTy::U128,
+ }),
+ },
+ })
+ .intern(Interner);
+ }
+ }
+ chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U8)).intern(Interner)
}
fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> {
self.db.impl_datum(self.krate, impl_id)
@@ -565,7 +593,7 @@ pub(crate) fn trait_datum_query(
let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
- let well_known = lang_attr(db.upcast(), trait_).and_then(well_known_trait_from_lang_item);
+ let well_known = db.lang_attr(trait_.into()).and_then(well_known_trait_from_lang_item);
let trait_datum = TraitDatum {
id: trait_id,
binders: make_binders(db, &generic_params, trait_datum_bound),
@@ -593,6 +621,7 @@ fn well_known_trait_from_lang_item(item: LangItem) -> Option<WellKnownTrait> {
LangItem::Unsize => WellKnownTrait::Unsize,
LangItem::Tuple => WellKnownTrait::Tuple,
LangItem::PointeeTrait => WellKnownTrait::Pointee,
+ LangItem::FnPtrTrait => WellKnownTrait::FnPtr,
_ => return None,
})
}
@@ -614,6 +643,7 @@ fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem {
WellKnownTrait::Unpin => LangItem::Unpin,
WellKnownTrait::Unsize => LangItem::Unsize,
WellKnownTrait::Pointee => LangItem::PointeeTrait,
+ WellKnownTrait::FnPtr => LangItem::FnPtrTrait,
}
}
@@ -844,28 +874,34 @@ pub(super) fn generic_predicate_to_inline_bound(
}
let args_no_self = trait_ref.substitution.as_slice(Interner)[1..]
.iter()
- .map(|ty| ty.clone().cast(Interner))
+ .cloned()
+ .casted(Interner)
.collect();
let trait_bound = rust_ir::TraitBound { trait_id: trait_ref.trait_id, args_no_self };
Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound)))
}
WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
- let trait_ = projection_ty.trait_(db);
- if projection_ty.self_type_parameter(db) != self_ty_shifted_in {
+ let generics =
+ generics(db.upcast(), from_assoc_type_id(projection_ty.associated_ty_id).into());
+ let (assoc_args, trait_args) =
+ projection_ty.substitution.as_slice(Interner).split_at(generics.len_self());
+ let (self_ty, args_no_self) =
+ trait_args.split_first().expect("projection without trait self type");
+ if self_ty.assert_ty_ref(Interner) != &self_ty_shifted_in {
return None;
}
- let args_no_self = projection_ty.substitution.as_slice(Interner)[1..]
- .iter()
- .map(|ty| ty.clone().cast(Interner))
- .collect();
+
+ let args_no_self = args_no_self.iter().cloned().casted(Interner).collect();
+ let parameters = assoc_args.to_vec();
+
let alias_eq_bound = rust_ir::AliasEqBound {
value: ty.clone(),
trait_bound: rust_ir::TraitBound {
- trait_id: to_chalk_trait_id(trait_),
+ trait_id: to_chalk_trait_id(projection_ty.trait_(db)),
args_no_self,
},
associated_ty_id: projection_ty.associated_ty_id,
- parameters: Vec::new(), // FIXME we don't support generic associated types yet
+ parameters,
};
Some(chalk_ir::Binders::new(
binders,
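The new `discriminant_type` above derives the discriminant's scalar type from the enum's `#[repr]` (falling back to `u8`) instead of hard-coding `u32`. As a plain-Rust illustration of why the repr matters, a field-less enum's size follows its declared repr:

#[repr(i16)]
enum WithRepr { A = -300, B = 300 }

enum NoRepr { A, B }

fn main() {
    // The discriminant is the entire value of a field-less enum, so its size is visible here.
    assert_eq!(std::mem::size_of::<WithRepr>(), 2); // i16 discriminant, as declared
    assert_eq!(std::mem::size_of::<NoRepr>(), 1);   // no repr: rustc packs it into one byte
}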
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
index a8071591a..c0b243ea2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
@@ -343,7 +343,8 @@ impl TyExt for Ty {
fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool {
let crate_id = owner.module(db.upcast()).krate();
- let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|x| x.as_trait()) else {
+ let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|it| it.as_trait())
+ else {
return false;
};
let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(self).build();
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
index 262341c6e..1c0f7b08d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
@@ -16,7 +16,8 @@ use triomphe::Arc;
use crate::{
db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode,
mir::monomorphize_mir_body_bad, to_placeholder_idx, utils::Generics, Const, ConstData,
- ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, Ty, TyBuilder,
+ ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, TraitEnvironment, Ty,
+ TyBuilder,
};
use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError};
@@ -88,7 +89,7 @@ pub(crate) fn path_to_const(
ConstValue::Placeholder(to_placeholder_idx(db, p.into()))
}
ParamLoweringMode::Variable => match args.param_idx(p.into()) {
- Some(x) => ConstValue::BoundVar(BoundVar::new(debruijn, x)),
+ Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)),
None => {
never!(
"Generic list doesn't contain this param: {:?}, {:?}, {:?}",
@@ -135,15 +136,15 @@ pub fn intern_const_ref(
ty: Ty,
krate: CrateId,
) -> Const {
- let layout = db.layout_of_ty(ty.clone(), krate);
+ let layout = db.layout_of_ty(ty.clone(), Arc::new(TraitEnvironment::empty(krate)));
let bytes = match value {
LiteralConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.
- let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
+ let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
}
LiteralConstRef::UInt(i) => {
- let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
+ let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
}
LiteralConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()),
@@ -171,9 +172,9 @@ pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> {
chalk_ir::ConstValue::InferenceVar(_) => None,
chalk_ir::ConstValue::Placeholder(_) => None,
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
- ConstScalar::Bytes(x, _) => Some(u128::from_le_bytes(pad16(&x, false))),
+ ConstScalar::Bytes(it, _) => Some(u128::from_le_bytes(pad16(&it, false))),
ConstScalar::UnevaluatedConst(c, subst) => {
- let ec = db.const_eval(*c, subst.clone()).ok()?;
+ let ec = db.const_eval(*c, subst.clone(), None).ok()?;
try_const_usize(db, &ec)
}
_ => None,
@@ -186,6 +187,7 @@ pub(crate) fn const_eval_recover(
_: &[String],
_: &GeneralConstId,
_: &Substitution,
+ _: &Option<Arc<TraitEnvironment>>,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
@@ -210,6 +212,7 @@ pub(crate) fn const_eval_query(
db: &dyn HirDatabase,
def: GeneralConstId,
subst: Substitution,
+ trait_env: Option<Arc<TraitEnvironment>>,
) -> Result<Const, ConstEvalError> {
let body = match def {
GeneralConstId::ConstId(c) => {
@@ -228,7 +231,7 @@ pub(crate) fn const_eval_query(
}
GeneralConstId::InTypeConstId(c) => db.mir_body(c.into())?,
};
- let c = interpret_mir(db, &body, false).0?;
+ let c = interpret_mir(db, body, false, trait_env).0?;
Ok(c)
}
@@ -241,7 +244,7 @@ pub(crate) fn const_eval_static_query(
Substitution::empty(Interner),
db.trait_environment_for_body(def.into()),
)?;
- let c = interpret_mir(db, &body, false).0?;
+ let c = interpret_mir(db, body, false, None).0?;
Ok(c)
}
@@ -268,7 +271,7 @@ pub(crate) fn const_eval_discriminant_variant(
Substitution::empty(Interner),
db.trait_environment_for_body(def),
)?;
- let c = interpret_mir(db, &mir_body, false).0?;
+ let c = interpret_mir(db, mir_body, false, None).0?;
let c = try_const_usize(db, &c).unwrap() as i128;
Ok(c)
}
@@ -293,7 +296,7 @@ pub(crate) fn eval_to_const(
}
let infer = ctx.clone().resolve_all();
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
- if let Ok(result) = interpret_mir(db, &mir_body, true).0 {
+ if let Ok(result) = interpret_mir(db, Arc::new(mir_body), true, None).0 {
return result;
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
index 0db1fefbf..666955fa1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
@@ -1,10 +1,11 @@
use base_db::{fixture::WithFixture, FileId};
use chalk_ir::Substitution;
use hir_def::db::DefDatabase;
+use test_utils::skip_slow_tests;
use crate::{
consteval::try_const_usize, db::HirDatabase, mir::pad16, test_db::TestDB, Const, ConstScalar,
- Interner,
+ Interner, MemoryMap,
};
use super::{
@@ -16,7 +17,7 @@ mod intrinsics;
fn simplify(e: ConstEvalError) -> ConstEvalError {
match e {
- ConstEvalError::MirEvalError(MirEvalError::InFunction(_, e, _, _)) => {
+ ConstEvalError::MirEvalError(MirEvalError::InFunction(e, _)) => {
simplify(ConstEvalError::MirEvalError(*e))
}
_ => e,
@@ -36,7 +37,37 @@ fn check_fail(ra_fixture: &str, error: impl FnOnce(ConstEvalError) -> bool) {
#[track_caller]
fn check_number(ra_fixture: &str, answer: i128) {
- let (db, file_id) = TestDB::with_single_file(ra_fixture);
+ check_answer(ra_fixture, |b, _| {
+ assert_eq!(
+ b,
+ &answer.to_le_bytes()[0..b.len()],
+ "Bytes differ. In decimal form: actual = {}, expected = {answer}",
+ i128::from_le_bytes(pad16(b, true))
+ );
+ });
+}
+
+#[track_caller]
+fn check_str(ra_fixture: &str, answer: &str) {
+ check_answer(ra_fixture, |b, mm| {
+ let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
+ let size = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap());
+ let Some(bytes) = mm.get(addr, size) else {
+ panic!("string data missed in the memory map");
+ };
+ assert_eq!(
+ bytes,
+ answer.as_bytes(),
+ "Bytes differ. In string form: actual = {}, expected = {answer}",
+ String::from_utf8_lossy(bytes)
+ );
+ });
+}
+
+#[track_caller]
+fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
+ let (db, file_ids) = TestDB::with_many_files(ra_fixture);
+ let file_id = *file_ids.last().unwrap();
let r = match eval_goal(&db, file_id) {
Ok(t) => t,
Err(e) => {
@@ -46,13 +77,8 @@ fn check_number(ra_fixture: &str, answer: i128) {
};
match &r.data(Interner).value {
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
- ConstScalar::Bytes(b, _) => {
- assert_eq!(
- b,
- &answer.to_le_bytes()[0..b.len()],
- "Bytes differ. In decimal form: actual = {}, expected = {answer}",
- i128::from_le_bytes(pad16(b, true))
- );
+ ConstScalar::Bytes(b, mm) => {
+ check(b, mm);
}
x => panic!("Expected number but found {:?}", x),
},
@@ -87,8 +113,8 @@ fn eval_goal(db: &TestDB, file_id: FileId) -> Result<Const, ConstEvalError> {
}
_ => None,
})
- .unwrap();
- db.const_eval(const_id.into(), Substitution::empty(Interner))
+ .expect("No const named GOAL found in the test");
+ db.const_eval(const_id.into(), Substitution::empty(Interner), None)
}
#[test]
@@ -108,6 +134,7 @@ fn bit_op() {
check_fail(r#"const GOAL: i8 = 1 << 8"#, |e| {
e == ConstEvalError::MirEvalError(MirEvalError::Panic("Overflow in Shl".to_string()))
});
+ check_number(r#"const GOAL: i32 = 100000000i32 << 11"#, (100000000i32 << 11) as i128);
}
#[test]
@@ -166,14 +193,21 @@ fn casts() {
check_number(
r#"
//- minicore: coerce_unsized, index, slice
+ struct X {
+ unsize_field: [u8],
+ }
+
const GOAL: usize = {
let a = [10, 20, 3, 15];
let x: &[i32] = &a;
- let y: *const [i32] = x;
- let z = y as *const [u8]; // slice fat pointer cast don't touch metadata
- let q = z as *const str;
- let p = q as *const [u8];
- let w = unsafe { &*z };
+ let x: *const [i32] = x;
+ let x = x as *const [u8]; // slice fat pointer casts don't touch metadata
+ let x = x as *const str;
+ let x = x as *const X;
+ let x = x as *const [i16];
+ let x = x as *const X;
+ let x = x as *const [u8];
+ let w = unsafe { &*x };
w.len()
};
"#,
@@ -199,6 +233,30 @@ fn raw_pointer_equality() {
}
#[test]
+fn alignment() {
+ check_answer(
+ r#"
+//- minicore: transmute
+use core::mem::transmute;
+const GOAL: usize = {
+ let x: i64 = 2;
+ transmute(&x)
+}
+ "#,
+ |b, _| assert_eq!(b[0] % 8, 0),
+ );
+ check_answer(
+ r#"
+//- minicore: transmute
+use core::mem::transmute;
+static X: i64 = 12;
+const GOAL: usize = transmute(&X);
+ "#,
+ |b, _| assert_eq!(b[0] % 8, 0),
+ );
+}
+
+#[test]
fn locals() {
check_number(
r#"
@@ -1129,6 +1187,25 @@ fn pattern_matching_ergonomics() {
}
#[test]
+fn destructing_assignment() {
+ check_number(
+ r#"
+ //- minicore: add
+ const fn f(i: &mut u8) -> &mut u8 {
+ *i += 1;
+ i
+ }
+ const GOAL: u8 = {
+ let mut i = 4;
+ _ = f(&mut i);
+ i
+ };
+ "#,
+ 5,
+ );
+}
+
+#[test]
fn let_else() {
check_number(
r#"
@@ -1370,14 +1447,14 @@ fn builtin_derive_macro() {
#[derive(Clone)]
struct Y {
field1: i32,
- field2: u8,
+ field2: ((i32, u8), i64),
}
const GOAL: u8 = {
- let x = X(2, Z::Foo(Y { field1: 4, field2: 5 }), 8);
+ let x = X(2, Z::Foo(Y { field1: 4, field2: ((32, 5), 12) }), 8);
let x = x.clone();
let Z::Foo(t) = x.1;
- t.field2
+ t.field2.0 .1
};
"#,
5,
@@ -1551,6 +1628,58 @@ fn closures() {
}
#[test]
+fn manual_fn_trait_impl() {
+ check_number(
+ r#"
+//- minicore: fn, copy
+struct S(i32);
+
+impl FnOnce<(i32, i32)> for S {
+ type Output = i32;
+
+ extern "rust-call" fn call_once(self, arg: (i32, i32)) -> i32 {
+ arg.0 + arg.1 + self.0
+ }
+}
+
+const GOAL: i32 = {
+ let s = S(1);
+ s(2, 3)
+};
+"#,
+ 6,
+ );
+}
+
+#[test]
+fn closure_capture_unsized_type() {
+ check_number(
+ r#"
+ //- minicore: fn, copy, slice, index, coerce_unsized
+ fn f<T: A>(x: &<T as A>::Ty) -> &<T as A>::Ty {
+ let c = || &*x;
+ c()
+ }
+
+ trait A {
+ type Ty;
+ }
+
+ impl A for i32 {
+ type Ty = [u8];
+ }
+
+ const GOAL: u8 = {
+ let k: &[u8] = &[1, 2, 3];
+ let k = f::<i32>(k);
+ k[0] + k[1] + k[2]
+ }
+ "#,
+ 6,
+ );
+}
+
+#[test]
fn closure_and_impl_fn() {
check_number(
r#"
@@ -1636,6 +1765,24 @@ fn function_pointer_in_constants() {
}
#[test]
+fn function_pointer_and_niche_optimization() {
+ check_number(
+ r#"
+ //- minicore: option
+ const GOAL: i32 = {
+ let f: fn(i32) -> i32 = |x| x + 2;
+ let init = Some(f);
+ match init {
+ Some(t) => t(3),
+ None => 222,
+ }
+ };
+ "#,
+ 5,
+ );
+}
+
+#[test]
fn function_pointer() {
check_number(
r#"
@@ -1663,6 +1810,18 @@ fn function_pointer() {
);
check_number(
r#"
+ fn add2(x: u8) -> u8 {
+ x + 2
+ }
+ const GOAL: u8 = {
+ let plus2 = add2 as fn(u8) -> u8;
+ plus2(3)
+ };
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
//- minicore: coerce_unsized, index, slice
fn add2(x: u8) -> u8 {
x + 2
@@ -1847,6 +2006,65 @@ fn dyn_trait() {
"#,
900,
);
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ trait A {
+ fn x(&self) -> i32;
+ }
+
+ trait B: A {}
+
+ impl A for i32 {
+ fn x(&self) -> i32 {
+ 5
+ }
+ }
+
+ impl B for i32 {
+
+ }
+
+ const fn f(x: &dyn B) -> i32 {
+ x.x()
+ }
+
+ const GOAL: i32 = f(&2i32);
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn coerce_unsized() {
+ check_number(
+ r#"
+//- minicore: coerce_unsized, deref_mut, slice, index, transmute, non_null
+use core::ops::{Deref, DerefMut, CoerceUnsized};
+use core::{marker::Unsize, mem::transmute, ptr::NonNull};
+
+struct ArcInner<T: ?Sized> {
+ strong: usize,
+ weak: usize,
+ data: T,
+}
+
+pub struct Arc<T: ?Sized> {
+ inner: NonNull<ArcInner<T>>,
+}
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
+
+const GOAL: usize = {
+ let x = transmute::<usize, Arc<[i32; 3]>>(12);
+ let y: Arc<[i32]> = x;
+ let z = transmute::<Arc<[i32]>, (usize, usize)>(y);
+ z.1
+};
+
+ "#,
+ 3,
+ );
}
#[test]
@@ -1961,6 +2179,17 @@ fn array_and_index() {
}
#[test]
+fn string() {
+ check_str(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ const GOAL: &str = "hello";
+ "#,
+ "hello",
+ );
+}
+
+#[test]
fn byte_string() {
check_number(
r#"
@@ -2018,6 +2247,57 @@ fn consts() {
"#,
6,
);
+
+ check_number(
+ r#"
+ const F1: i32 = 2147483647;
+ const F2: i32 = F1 - 25;
+ const GOAL: i32 = F2;
+ "#,
+ 2147483622,
+ );
+
+ check_number(
+ r#"
+ const F1: i32 = -2147483648;
+ const F2: i32 = F1 + 18;
+ const GOAL: i32 = F2;
+ "#,
+ -2147483630,
+ );
+
+ check_number(
+ r#"
+ const F1: i32 = 10;
+ const F2: i32 = F1 - 20;
+ const GOAL: i32 = F2;
+ "#,
+ -10,
+ );
+
+ check_number(
+ r#"
+ const F1: i32 = 25;
+ const F2: i32 = F1 - 25;
+ const GOAL: i32 = F2;
+ "#,
+ 0,
+ );
+
+ check_number(
+ r#"
+ const A: i32 = -2147483648;
+ const GOAL: bool = A > 0;
+ "#,
+ 0,
+ );
+
+ check_number(
+ r#"
+ const GOAL: i64 = (-2147483648_i32) as i64;
+ "#,
+ -2147483648,
+ );
}
#[test]
@@ -2116,11 +2396,14 @@ fn const_loop() {
fn const_transfer_memory() {
check_number(
r#"
- const A1: &i32 = &2;
- const A2: &i32 = &5;
- const GOAL: i32 = *A1 + *A2;
+ //- minicore: slice, index, coerce_unsized
+ const A1: &i32 = &1;
+ const A2: &i32 = &10;
+ const A3: [&i32; 3] = [&1, &2, &100];
+ const A4: (i32, &i32) = (1, &1000);
+ const GOAL: i32 = *A1 + *A2 + *A3[2] + *A4.1;
"#,
- 7,
+ 1111,
);
}
@@ -2287,6 +2570,51 @@ fn const_trait_assoc() {
);
check_number(
r#"
+ //- /a/lib.rs crate:a
+ pub trait ToConst {
+ const VAL: usize;
+ }
+ pub const fn to_const<T: ToConst>() -> usize {
+ T::VAL
+ }
+ //- /main.rs crate:main deps:a
+ use a::{ToConst, to_const};
+ struct U0;
+ impl ToConst for U0 {
+ const VAL: usize = 5;
+ }
+ const GOAL: usize = to_const::<U0>();
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
+ //- minicore: size_of, fn
+ //- /a/lib.rs crate:a
+ use core::mem::size_of;
+ pub struct S<T>(T);
+ impl<T> S<T> {
+ pub const X: usize = {
+ let k: T;
+ let f = || core::mem::size_of::<T>();
+ f()
+ };
+ }
+ //- /main.rs crate:main deps:a
+ use a::{S};
+ trait Tr {
+ type Ty;
+ }
+ impl Tr for i32 {
+ type Ty = u64;
+ }
+ struct K<T: Tr>(<T as Tr>::Ty);
+ const GOAL: usize = S::<K<i32>>::X;
+ "#,
+ 8,
+ );
+ check_number(
+ r#"
struct S<T>(*mut T);
trait MySized: Sized {
@@ -2311,21 +2639,11 @@ fn const_trait_assoc() {
}
#[test]
-fn panic_messages() {
- check_fail(
- r#"
- //- minicore: panic
- const GOAL: u8 = {
- let x: u16 = 2;
- panic!("hello");
- };
- "#,
- |e| e == ConstEvalError::MirEvalError(MirEvalError::Panic("hello".to_string())),
- );
-}
-
-#[test]
fn exec_limits() {
+ if skip_slow_tests() {
+ return;
+ }
+
check_fail(
r#"
const GOAL: usize = loop {};
@@ -2339,7 +2657,7 @@ fn exec_limits() {
}
const GOAL: i32 = f(0);
"#,
- |e| e == ConstEvalError::MirEvalError(MirEvalError::StackOverflow),
+ |e| e == ConstEvalError::MirEvalError(MirEvalError::ExecutionLimitExceeded),
);
// Reasonable code should still work
check_number(
@@ -2356,9 +2674,31 @@ fn exec_limits() {
}
sum
}
- const GOAL: i32 = f(10000);
+ const GOAL: i32 = f(1000);
"#,
- 10000 * 10000,
+ 1000 * 1000,
+ );
+}
+
+#[test]
+fn memory_limit() {
+ check_fail(
+ r#"
+ extern "Rust" {
+ #[rustc_allocator]
+ fn __rust_alloc(size: usize, align: usize) -> *mut u8;
+ }
+
+ const GOAL: u8 = unsafe {
+ __rust_alloc(30_000_000_000, 1); // 30GB
+ 2
+ };
+ "#,
+ |e| {
+ e == ConstEvalError::MirEvalError(MirEvalError::Panic(
+ "Memory allocation of 30000000000 bytes failed".to_string(),
+ ))
+ },
);
}
@@ -2377,6 +2717,37 @@ fn type_error() {
}
#[test]
+fn unsized_field() {
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice, transmute
+ use core::mem::transmute;
+
+ struct Slice([usize]);
+ struct Slice2(Slice);
+
+ impl Slice2 {
+ fn as_inner(&self) -> &Slice {
+ &self.0
+ }
+
+ fn as_bytes(&self) -> &[usize] {
+ &self.as_inner().0
+ }
+ }
+
+ const GOAL: usize = unsafe {
+ let x: &[usize] = &[1, 2, 3];
+ let x: &Slice2 = transmute(x);
+ let x = x.as_bytes();
+ x[0] + x[1] + x[2] + x.len() * 100
+ };
+ "#,
+ 306,
+ );
+}
+
+#[test]
fn unsized_local() {
check_fail(
r#"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs
index e05d824db..2855f7890 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs
@@ -15,6 +15,171 @@ fn size_of() {
}
#[test]
+fn size_of_val() {
+ check_number(
+ r#"
+ //- minicore: coerce_unsized
+ extern "rust-intrinsic" {
+ pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ struct X(i32, u8);
+
+ const GOAL: usize = size_of_val(&X(1, 2));
+ "#,
+ 8,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized
+ extern "rust-intrinsic" {
+ pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ const GOAL: usize = {
+ let it: &[i32] = &[1, 2, 3];
+ size_of_val(it)
+ };
+ "#,
+ 12,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, transmute
+ use core::mem::transmute;
+
+ extern "rust-intrinsic" {
+ pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ struct X {
+ x: i64,
+ y: u8,
+ t: [i32],
+ }
+
+ const GOAL: usize = unsafe {
+ let y: &X = transmute([0usize, 3]);
+ size_of_val(y)
+ };
+ "#,
+ 24,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, transmute
+ use core::mem::transmute;
+
+ extern "rust-intrinsic" {
+ pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ struct X {
+ x: i32,
+ y: i64,
+ t: [u8],
+ }
+
+ const GOAL: usize = unsafe {
+ let y: &X = transmute([0usize, 15]);
+ size_of_val(y)
+ };
+ "#,
+ 32,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, fmt, builtin_impls
+ extern "rust-intrinsic" {
+ pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ const GOAL: usize = {
+ let x: &i16 = &5;
+ let y: &dyn core::fmt::Debug = x;
+ let z: &dyn core::fmt::Debug = &y;
+ size_of_val(x) + size_of_val(y) * 10 + size_of_val(z) * 100
+ };
+ "#,
+ 1622,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized
+ extern "rust-intrinsic" {
+ pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ const GOAL: usize = {
+ size_of_val("salam")
+ };
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn min_align_of_val() {
+ check_number(
+ r#"
+ //- minicore: coerce_unsized
+ extern "rust-intrinsic" {
+ pub fn min_align_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ struct X(i32, u8);
+
+ const GOAL: usize = min_align_of_val(&X(1, 2));
+ "#,
+ 4,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized
+ extern "rust-intrinsic" {
+ pub fn min_align_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ const GOAL: usize = {
+ let x: &[i32] = &[1, 2, 3];
+ min_align_of_val(x)
+ };
+ "#,
+ 4,
+ );
+}
+
+#[test]
+fn type_name() {
+ check_str(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn type_name<T: ?Sized>() -> &'static str;
+ }
+
+ const GOAL: &str = type_name::<i32>();
+ "#,
+ "i32",
+ );
+ check_str(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn type_name<T: ?Sized>() -> &'static str;
+ }
+
+ mod mod1 {
+ pub mod mod2 {
+ pub struct Ty;
+ }
+ }
+
+ const GOAL: &str = type_name::<mod1::mod2::Ty>();
+ "#,
+ "mod1::mod2::Ty",
+ );
+}
+
+#[test]
fn transmute() {
check_number(
r#"
@@ -29,9 +194,28 @@ fn transmute() {
}
#[test]
+fn read_via_copy() {
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn read_via_copy<T>(e: *const T) -> T;
+ pub fn volatile_load<T>(e: *const T) -> T;
+ }
+
+ const GOAL: i32 = {
+ let x = 2;
+ read_via_copy(&x) + volatile_load(&x)
+ };
+ "#,
+ 4,
+ );
+}
+
+#[test]
fn const_eval_select() {
check_number(
r#"
+ //- minicore: fn
extern "rust-intrinsic" {
pub fn const_eval_select<ARG, F, G, RET>(arg: ARG, called_in_const: F, called_at_rt: G) -> RET
where
@@ -68,7 +252,29 @@ fn wrapping_add() {
}
#[test]
-fn saturating_add() {
+fn ptr_offset_from() {
+ check_number(
+ r#"
+ //- minicore: index, slice, coerce_unsized
+ extern "rust-intrinsic" {
+ pub fn ptr_offset_from<T>(ptr: *const T, base: *const T) -> isize;
+ pub fn ptr_offset_from_unsigned<T>(ptr: *const T, base: *const T) -> usize;
+ }
+
+ const GOAL: isize = {
+ let x = [1, 2, 3, 4, 5i32];
+ let r1 = -ptr_offset_from(&x[0], &x[4]);
+ let r2 = ptr_offset_from(&x[3], &x[1]);
+ let r3 = ptr_offset_from_unsigned(&x[3], &x[0]) as isize;
+ r3 * 100 + r2 * 10 + r1
+ };
+ "#,
+ 324,
+ );
+}
+
+#[test]
+fn saturating() {
check_number(
r#"
extern "rust-intrinsic" {
@@ -82,6 +288,16 @@ fn saturating_add() {
check_number(
r#"
extern "rust-intrinsic" {
+ pub fn saturating_sub<T>(a: T, b: T) -> T;
+ }
+
+ const GOAL: bool = saturating_sub(5u8, 7) == 0 && saturating_sub(8u8, 4) == 4;
+ "#,
+ 1,
+ );
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
pub fn saturating_add<T>(a: T, b: T) -> T;
}
@@ -112,6 +328,7 @@ fn allocator() {
*ptr = 23;
*ptr2 = 32;
let ptr = __rust_realloc(ptr, 4, 1, 8);
+ let ptr = __rust_realloc(ptr, 8, 1, 3);
let ptr2 = ((ptr as usize) + 1) as *mut u8;
*ptr + *ptr2
};
@@ -160,6 +377,24 @@ fn needs_drop() {
}
#[test]
+fn discriminant_value() {
+ check_number(
+ r#"
+ //- minicore: discriminant, option
+ use core::marker::DiscriminantKind;
+ extern "rust-intrinsic" {
+ pub fn discriminant_value<T>(v: &T) -> <T as DiscriminantKind>::Discriminant;
+ }
+ const GOAL: bool = {
+ discriminant_value(&Some(2i32)) == discriminant_value(&Some(5i32))
+ && discriminant_value(&Some(2i32)) != discriminant_value(&None::<i32>)
+ };
+ "#,
+ 1,
+ );
+}
+
+#[test]
fn likely() {
check_number(
r#"
@@ -225,6 +460,8 @@ fn atomic() {
pub fn atomic_nand_seqcst<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_or_release<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_xor_seqcst<T: Copy>(dst: *mut T, src: T) -> T;
+ pub fn atomic_fence_seqcst();
+ pub fn atomic_singlethreadfence_acqrel();
}
fn should_not_reach() {
@@ -239,6 +476,7 @@ fn atomic() {
if (30, true) != atomic_cxchg_release_seqcst(&mut y, 30, 40) {
should_not_reach();
}
+ atomic_fence_seqcst();
if (40, false) != atomic_cxchg_release_seqcst(&mut y, 30, 50) {
should_not_reach();
}
@@ -246,6 +484,7 @@ fn atomic() {
should_not_reach();
}
let mut z = atomic_xsub_seqcst(&mut x, -200);
+ atomic_singlethreadfence_acqrel();
atomic_xor_seqcst(&mut x, 1024);
atomic_load_seqcst(&x) + z * 3 + atomic_load_seqcst(&y) * 2
};
@@ -328,6 +567,24 @@ fn copy_nonoverlapping() {
}
#[test]
+fn write_bytes() {
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
+ }
+
+ const GOAL: i32 = unsafe {
+ let mut x = 2;
+ write_bytes(&mut x, 5, 1);
+ x
+ };
+ "#,
+ 0x05050505,
+ );
+}
+
+#[test]
fn copy() {
check_number(
r#"
@@ -363,6 +620,20 @@ fn ctpop() {
}
#[test]
+fn ctlz() {
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn ctlz<T: Copy>(x: T) -> T;
+ }
+
+ const GOAL: u8 = ctlz(0b0001_1100_u8);
+ "#,
+ 3,
+ );
+}
+
+#[test]
fn cttz() {
check_number(
r#"
@@ -375,3 +646,85 @@ fn cttz() {
3,
);
}
+
+#[test]
+fn rotate() {
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn rotate_left<T: Copy>(x: T, y: T) -> T;
+ }
+
+ const GOAL: i64 = rotate_left(0xaa00000000006e1i64, 12);
+ "#,
+ 0x6e10aa,
+ );
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn rotate_right<T: Copy>(x: T, y: T) -> T;
+ }
+
+ const GOAL: i64 = rotate_right(0x6e10aa, 12);
+ "#,
+ 0xaa00000000006e1,
+ );
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn rotate_left<T: Copy>(x: T, y: T) -> T;
+ }
+
+ const GOAL: i8 = rotate_left(129, 2);
+ "#,
+ 6,
+ );
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn rotate_right<T: Copy>(x: T, y: T) -> T;
+ }
+
+ const GOAL: i32 = rotate_right(10006016, 1020315);
+ "#,
+ 320192512,
+ );
+}
+
+#[test]
+fn simd() {
+ check_number(
+ r#"
+ pub struct i8x16(
+ i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,
+ );
+ extern "platform-intrinsic" {
+ pub fn simd_bitmask<T, U>(x: T) -> U;
+ }
+ const GOAL: u16 = simd_bitmask(i8x16(
+ 0, 1, 0, 0, 2, 255, 100, 0, 50, 0, 1, 1, 0, 0, 0, 0
+ ));
+ "#,
+ 0b0000110101110010,
+ );
+ check_number(
+ r#"
+ pub struct i8x16(
+ i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,
+ );
+ extern "platform-intrinsic" {
+ pub fn simd_lt<T, U>(x: T, y: T) -> U;
+ pub fn simd_bitmask<T, U>(x: T) -> U;
+ }
+ const GOAL: u16 = simd_bitmask(simd_lt::<i8x16, i8x16>(
+ i8x16(
+ -105, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10
+ ),
+ i8x16(
+ -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11
+ ),
+ ));
+ "#,
+ 0xFFFF,
+ );
+}
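Several of the intrinsic tests above encode arithmetic that can be cross-checked against stable standard-library equivalents. A small sketch confirming the expected constants for `rotate_*`, `write_bytes`, and `ptr_offset_from` (using `offset_from`, the stable counterpart):

fn main() {
    // rotate: matches the constants expected by the `rotate` test above
    assert_eq!(0x0aa0_0000_0000_06e1_i64.rotate_left(12), 0x6e10aa);
    assert_eq!((129u8 as i8).rotate_left(2), 6);

    // write_bytes: filling one i32 with the byte 5 yields 0x05050505
    let mut x: i32 = 2;
    unsafe { std::ptr::write_bytes(&mut x as *mut i32, 5, 1) };
    assert_eq!(x, 0x05050505);

    // ptr_offset_from: the distances expected by the test, via stable `offset_from`
    let a = [1, 2, 3, 4, 5i32];
    unsafe {
        assert_eq!((&a[3] as *const i32).offset_from(&a[1]), 2);
        assert_eq!(-(&a[0] as *const i32).offset_from(&a[4]), 4);
    }
}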
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
index 9dd810f84..9c96b5ab8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -77,8 +77,12 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::consteval::const_eval_query)]
#[salsa::cycle(crate::consteval::const_eval_recover)]
- fn const_eval(&self, def: GeneralConstId, subst: Substitution)
- -> Result<Const, ConstEvalError>;
+ fn const_eval(
+ &self,
+ def: GeneralConstId,
+ subst: Substitution,
+ trait_env: Option<Arc<crate::TraitEnvironment>>,
+ ) -> Result<Const, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_static_query)]
#[salsa::cycle(crate::consteval::const_eval_static_recover)]
@@ -100,16 +104,28 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self,
def: AdtId,
subst: Substitution,
- krate: CrateId,
+ env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::layout_of_ty_query)]
#[salsa::cycle(crate::layout::layout_of_ty_recover)]
- fn layout_of_ty(&self, ty: Ty, krate: CrateId) -> Result<Arc<Layout>, LayoutError>;
+ fn layout_of_ty(
+ &self,
+ ty: Ty,
+ env: Arc<crate::TraitEnvironment>,
+ ) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>;
+ #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
+ fn lookup_impl_method(
+ &self,
+ env: Arc<crate::TraitEnvironment>,
+ func: FunctionId,
+ fn_subst: Substitution,
+ ) -> (FunctionId, Substitution);
+
#[salsa::invoke(crate::lower::callable_item_sig)]
fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
index 4b147b997..ef43ed5c4 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
@@ -5,7 +5,7 @@ mod unsafe_check;
mod decl_check;
pub use crate::diagnostics::{
- decl_check::{incorrect_case, IncorrectCase},
+ decl_check::{incorrect_case, CaseType, IncorrectCase},
expr::{
record_literal_missing_fields, record_pattern_missing_fields, BodyValidationDiagnostic,
},
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
index 1233469b9..a94a962c1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -14,13 +14,12 @@ mod case_conv;
use std::fmt;
-use base_db::CrateId;
use hir_def::{
data::adt::VariantData,
hir::{Pat, PatId},
src::HasSource,
- AdtId, AttrDefId, ConstId, EnumId, FunctionId, ItemContainerId, Lookup, ModuleDefId, StaticId,
- StructId,
+ AdtId, AttrDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, ItemContainerId,
+ Lookup, ModuleDefId, StaticId, StructId,
};
use hir_expand::{
name::{AsName, Name},
@@ -44,24 +43,20 @@ mod allow {
pub(super) const NON_CAMEL_CASE_TYPES: &str = "non_camel_case_types";
}
-pub fn incorrect_case(
- db: &dyn HirDatabase,
- krate: CrateId,
- owner: ModuleDefId,
-) -> Vec<IncorrectCase> {
+pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> {
let _p = profile::span("validate_module_item");
- let mut validator = DeclValidator::new(db, krate);
+ let mut validator = DeclValidator::new(db);
validator.validate_item(owner);
validator.sink
}
#[derive(Debug)]
pub enum CaseType {
- // `some_var`
+ /// `some_var`
LowerSnakeCase,
- // `SOME_CONST`
+ /// `SOME_CONST`
UpperSnakeCase,
- // `SomeStruct`
+ /// `SomeStruct`
UpperCamelCase,
}
@@ -120,7 +115,6 @@ pub struct IncorrectCase {
pub(super) struct DeclValidator<'a> {
db: &'a dyn HirDatabase,
- krate: CrateId,
pub(super) sink: Vec<IncorrectCase>,
}
@@ -132,8 +126,8 @@ struct Replacement {
}
impl<'a> DeclValidator<'a> {
- pub(super) fn new(db: &'a dyn HirDatabase, krate: CrateId) -> DeclValidator<'a> {
- DeclValidator { db, krate, sink: Vec::new() }
+ pub(super) fn new(db: &'a dyn HirDatabase) -> DeclValidator<'a> {
+ DeclValidator { db, sink: Vec::new() }
}
pub(super) fn validate_item(&mut self, item: ModuleDefId) {
@@ -181,6 +175,8 @@ impl<'a> DeclValidator<'a> {
AttrDefId::TraitAliasId(taid) => Some(taid.lookup(self.db.upcast()).container.into()),
AttrDefId::ImplId(iid) => Some(iid.lookup(self.db.upcast()).container.into()),
AttrDefId::ExternBlockId(id) => Some(id.lookup(self.db.upcast()).container.into()),
+ AttrDefId::ExternCrateId(id) => Some(id.lookup(self.db.upcast()).container.into()),
+ AttrDefId::UseId(id) => Some(id.lookup(self.db.upcast()).container.into()),
// These warnings should not explore macro definitions at all
AttrDefId::MacroId(_) => None,
AttrDefId::AdtId(aid) => match aid {
@@ -194,8 +190,7 @@ impl<'a> DeclValidator<'a> {
AttrDefId::TypeAliasId(_) => None,
AttrDefId::GenericParamId(_) => None,
}
- .map(|mid| self.allowed(mid, allow_name, true))
- .unwrap_or(false)
+ .is_some_and(|mid| self.allowed(mid, allow_name, true))
}
fn validate_func(&mut self, func: FunctionId) {
@@ -205,17 +200,7 @@ impl<'a> DeclValidator<'a> {
return;
}
- let body = self.db.body(func.into());
-
- // Recursively validate inner scope items, such as static variables and constants.
- for (_, block_def_map) in body.blocks(self.db.upcast()) {
- for (_, module) in block_def_map.modules() {
- for def_id in module.scope.declarations() {
- let mut validator = DeclValidator::new(self.db, self.krate);
- validator.validate_item(def_id);
- }
- }
- }
+ self.validate_body_inner_items(func.into());
// Check whether non-snake case identifiers are allowed for this function.
if self.allowed(func.into(), allow::NON_SNAKE_CASE, false) {
@@ -230,6 +215,8 @@ impl<'a> DeclValidator<'a> {
expected_case: CaseType::LowerSnakeCase,
});
+ let body = self.db.body(func.into());
+
// Check the patterns inside the function body.
// This includes function parameters.
let pats_replacements = body
@@ -495,6 +482,11 @@ impl<'a> DeclValidator<'a> {
fn validate_enum(&mut self, enum_id: EnumId) {
let data = self.db.enum_data(enum_id);
+ for (local_id, _) in data.variants.iter() {
+ let variant_id = EnumVariantId { parent: enum_id, local_id };
+ self.validate_body_inner_items(variant_id.into());
+ }
+
// Check whether non-camel case names are allowed for this enum.
if self.allowed(enum_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
return;
@@ -511,13 +503,11 @@ impl<'a> DeclValidator<'a> {
// Check the field names.
let enum_fields_replacements = data
.variants
- .iter()
- .filter_map(|(_, variant)| {
+ .values()
+ .filter_map(|variant| {
Some(Replacement {
current_name: variant.name.clone(),
- suggested_text: to_camel_case(
- &variant.name.display(self.db.upcast()).to_string(),
- )?,
+ suggested_text: to_camel_case(&variant.name.to_smol_str())?,
expected_case: CaseType::UpperCamelCase,
})
})
@@ -621,6 +611,8 @@ impl<'a> DeclValidator<'a> {
fn validate_const(&mut self, const_id: ConstId) {
let data = self.db.const_data(const_id);
+ self.validate_body_inner_items(const_id.into());
+
if self.allowed(const_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
return;
}
@@ -630,7 +622,7 @@ impl<'a> DeclValidator<'a> {
None => return,
};
- let const_name = name.display(self.db.upcast()).to_string();
+ let const_name = name.to_smol_str();
let replacement = if let Some(new_name) = to_upper_snake_case(&const_name) {
Replacement {
current_name: name.clone(),
@@ -669,13 +661,15 @@ impl<'a> DeclValidator<'a> {
return;
}
+ self.validate_body_inner_items(static_id.into());
+
if self.allowed(static_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
return;
}
let name = &data.name;
- let static_name = name.display(self.db.upcast()).to_string();
+ let static_name = name.to_smol_str();
let replacement = if let Some(new_name) = to_upper_snake_case(&static_name) {
Replacement {
current_name: name.clone(),
@@ -706,4 +700,17 @@ impl<'a> DeclValidator<'a> {
self.sink.push(diagnostic);
}
+
+ // FIXME: We don't currently validate names within `DefWithBodyId::InTypeConstId`.
+ /// Recursively validates inner scope items, such as static variables and constants.
+ fn validate_body_inner_items(&mut self, body_id: DefWithBodyId) {
+ let body = self.db.body(body_id);
+ for (_, block_def_map) in body.blocks(self.db.upcast()) {
+ for (_, module) in block_def_map.modules() {
+ for def_id in module.scope.declarations() {
+ self.validate_item(def_id);
+ }
+ }
+ }
+ }
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
index c1df24d17..1b4ee4613 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -29,6 +29,7 @@ use itertools::Itertools;
use la_arena::ArenaMap;
use smallvec::SmallVec;
use stdx::never;
+use triomphe::Arc;
use crate::{
consteval::try_const_usize,
@@ -43,26 +44,19 @@ use crate::{
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstScalar, ConstValue,
DomainGoal, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives,
MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar,
- Substitution, TraitRef, TraitRefExt, Ty, TyExt, WhereClause,
+ Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyExt, WhereClause,
};
pub trait HirWrite: fmt::Write {
- fn start_location_link(&mut self, location: ModuleDefId);
- fn end_location_link(&mut self);
+ fn start_location_link(&mut self, _location: ModuleDefId) {}
+ fn end_location_link(&mut self) {}
}
// String will ignore link metadata
-impl HirWrite for String {
- fn start_location_link(&mut self, _: ModuleDefId) {}
-
- fn end_location_link(&mut self) {}
-}
+impl HirWrite for String {}
// `core::Formatter` will ignore metadata
-impl HirWrite for fmt::Formatter<'_> {
- fn start_location_link(&mut self, _: ModuleDefId) {}
- fn end_location_link(&mut self) {}
-}
+impl HirWrite for fmt::Formatter<'_> {}
pub struct HirFormatter<'a> {
pub db: &'a dyn HirDatabase,
@@ -192,7 +186,7 @@ pub trait HirDisplay {
}
}
-impl<'a> HirFormatter<'a> {
+impl HirFormatter<'_> {
pub fn write_joined<T: HirDisplay>(
&mut self,
iter: impl IntoIterator<Item = T>,
@@ -342,7 +336,7 @@ impl<T: HirDisplay> HirDisplayWrapper<'_, T> {
}
}
-impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T>
+impl<T> fmt::Display for HirDisplayWrapper<'_, T>
where
T: HirDisplay,
{
@@ -360,7 +354,7 @@ where
const TYPE_HINT_TRUNCATION: &str = "…";
-impl<T: HirDisplay> HirDisplay for &'_ T {
+impl<T: HirDisplay> HirDisplay for &T {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
HirDisplay::hir_fmt(*self, f)
}
@@ -446,28 +440,6 @@ impl HirDisplay for Const {
}
}
-pub struct HexifiedConst(pub Const);
-
-impl HirDisplay for HexifiedConst {
- fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
- let data = &self.0.data(Interner);
- if let TyKind::Scalar(s) = data.ty.kind(Interner) {
- if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) {
- if let ConstValue::Concrete(c) = &data.value {
- if let ConstScalar::Bytes(b, m) = &c.interned {
- let value = u128::from_le_bytes(pad16(b, false));
- if value >= 10 {
- render_const_scalar(f, &b, m, &data.ty)?;
- return write!(f, " ({:#X})", value);
- }
- }
- }
- }
- }
- self.0.hir_fmt(f)
- }
-}
-
fn render_const_scalar(
f: &mut HirFormatter<'_>,
b: &[u8],
@@ -476,33 +448,35 @@ fn render_const_scalar(
) -> Result<(), HirDisplayError> {
// FIXME: We need to get krate from the final callers of the hir display
// infrastructure and have it here as a field on `f`.
- let krate = *f.db.crate_graph().crates_in_topological_order().last().unwrap();
+ let trait_env = Arc::new(TraitEnvironment::empty(
+ *f.db.crate_graph().crates_in_topological_order().last().unwrap(),
+ ));
match ty.kind(Interner) {
TyKind::Scalar(s) => match s {
Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
Scalar::Char => {
- let x = u128::from_le_bytes(pad16(b, false)) as u32;
- let Ok(c) = char::try_from(x) else {
+ let it = u128::from_le_bytes(pad16(b, false)) as u32;
+ let Ok(c) = char::try_from(it) else {
return f.write_str("<unicode-error>");
};
write!(f, "{c:?}")
}
Scalar::Int(_) => {
- let x = i128::from_le_bytes(pad16(b, true));
- write!(f, "{x}")
+ let it = i128::from_le_bytes(pad16(b, true));
+ write!(f, "{it}")
}
Scalar::Uint(_) => {
- let x = u128::from_le_bytes(pad16(b, false));
- write!(f, "{x}")
+ let it = u128::from_le_bytes(pad16(b, false));
+ write!(f, "{it}")
}
Scalar::Float(fl) => match fl {
chalk_ir::FloatTy::F32 => {
- let x = f32::from_le_bytes(b.try_into().unwrap());
- write!(f, "{x:?}")
+ let it = f32::from_le_bytes(b.try_into().unwrap());
+ write!(f, "{it:?}")
}
chalk_ir::FloatTy::F64 => {
- let x = f64::from_le_bytes(b.try_into().unwrap());
- write!(f, "{x:?}")
+ let it = f64::from_le_bytes(b.try_into().unwrap());
+ write!(f, "{it:?}")
}
},
},
@@ -519,7 +493,7 @@ fn render_const_scalar(
TyKind::Slice(ty) => {
let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
let count = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap());
- let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env) else {
return f.write_str("<layout-error>");
};
let size_one = layout.size.bytes_usize();
@@ -545,7 +519,7 @@ fn render_const_scalar(
let Ok(t) = memory_map.vtable.ty(ty_id) else {
return f.write_str("<ty-missing-in-vtable-map>");
};
- let Ok(layout) = f.db.layout_of_ty(t.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(t.clone(), trait_env) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@@ -577,7 +551,7 @@ fn render_const_scalar(
return f.write_str("<layout-error>");
}
});
- let Ok(layout) = f.db.layout_of_ty(t.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(t.clone(), trait_env) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@@ -589,7 +563,7 @@ fn render_const_scalar(
}
},
TyKind::Tuple(_, subst) => {
- let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env.clone()) else {
return f.write_str("<layout-error>");
};
f.write_str("(")?;
@@ -602,7 +576,7 @@ fn render_const_scalar(
}
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
let offset = layout.fields.offset(id).bytes_usize();
- let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env.clone()) else {
f.write_str("<layout-error>")?;
continue;
};
@@ -612,7 +586,7 @@ fn render_const_scalar(
f.write_str(")")
}
TyKind::Adt(adt, subst) => {
- let Ok(layout) = f.db.layout_of_adt(adt.0, subst.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_adt(adt.0, subst.clone(), trait_env.clone()) else {
return f.write_str("<layout-error>");
};
match adt.0 {
@@ -624,7 +598,7 @@ fn render_const_scalar(
&data.variant_data,
f,
&field_types,
- adt.0.module(f.db.upcast()).krate(),
+ f.db.trait_environment(adt.0.into()),
&layout,
subst,
b,
@@ -636,7 +610,8 @@ fn render_const_scalar(
}
hir_def::AdtId::EnumId(e) => {
let Some((var_id, var_layout)) =
- detect_variant_from_bytes(&layout, f.db, krate, b, e) else {
+ detect_variant_from_bytes(&layout, f.db, trait_env.clone(), b, e)
+ else {
return f.write_str("<failed-to-detect-variant>");
};
let data = &f.db.enum_data(e).variants[var_id];
@@ -647,7 +622,7 @@ fn render_const_scalar(
&data.variant_data,
f,
&field_types,
- adt.0.module(f.db.upcast()).krate(),
+ f.db.trait_environment(adt.0.into()),
&var_layout,
subst,
b,
@@ -658,15 +633,15 @@ fn render_const_scalar(
}
TyKind::FnDef(..) => ty.hir_fmt(f),
TyKind::Function(_) | TyKind::Raw(_, _) => {
- let x = u128::from_le_bytes(pad16(b, false));
- write!(f, "{:#X} as ", x)?;
+ let it = u128::from_le_bytes(pad16(b, false));
+ write!(f, "{:#X} as ", it)?;
ty.hir_fmt(f)
}
TyKind::Array(ty, len) => {
let Some(len) = try_const_usize(f.db, len) else {
return f.write_str("<unknown-array-len>");
};
- let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env) else {
return f.write_str("<layout-error>");
};
let size_one = layout.size.bytes_usize();
@@ -705,7 +680,7 @@ fn render_variant_after_name(
data: &VariantData,
f: &mut HirFormatter<'_>,
field_types: &ArenaMap<LocalFieldId, Binders<Ty>>,
- krate: CrateId,
+ trait_env: Arc<TraitEnvironment>,
layout: &Layout,
subst: &Substitution,
b: &[u8],
@@ -716,7 +691,7 @@ fn render_variant_after_name(
let render_field = |f: &mut HirFormatter<'_>, id: LocalFieldId| {
let offset = layout.fields.offset(u32::from(id.into_raw()) as usize).bytes_usize();
let ty = field_types[id].clone().substitute(Interner, subst);
- let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env.clone()) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@@ -735,7 +710,7 @@ fn render_variant_after_name(
}
write!(f, " }}")?;
} else {
- let mut it = it.map(|x| x.0);
+ let mut it = it.map(|it| it.0);
write!(f, "(")?;
if let Some(id) = it.next() {
render_field(f, id)?;
@@ -903,6 +878,13 @@ impl HirDisplay for Ty {
TyKind::FnDef(def, parameters) => {
let def = from_chalk(db, *def);
let sig = db.callable_item_signature(def).substitute(Interner, parameters);
+
+ if f.display_target.is_source_code() {
+ // `FnDef` is anonymous and there's no surface syntax for it. Show it as a
+ // function pointer type.
+ return sig.hir_fmt(f);
+ }
+
f.start_location_link(def.into());
match def {
CallableDefId::FunctionId(ff) => {
@@ -1277,19 +1259,20 @@ fn hir_fmt_generics(
i: usize,
parameters: &Substitution,
) -> bool {
- if parameter.ty(Interner).map(|x| x.kind(Interner)) == Some(&TyKind::Error)
+ if parameter.ty(Interner).map(|it| it.kind(Interner))
+ == Some(&TyKind::Error)
{
return true;
}
if let Some(ConstValue::Concrete(c)) =
- parameter.constant(Interner).map(|x| &x.data(Interner).value)
+ parameter.constant(Interner).map(|it| &it.data(Interner).value)
{
if c.interned == ConstScalar::Unknown {
return true;
}
}
let default_parameter = match default_parameters.get(i) {
- Some(x) => x,
+ Some(it) => it,
None => return true,
};
let actual_default =
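Note: the constant rendering above decodes scalars from little-endian byte buffers padded out to 16 bytes, with sign extension only for signed integers. A self-contained sketch of that decoding step; this `pad16` is a stand-in for the helper re-exported from the MIR evaluator, not the real implementation:

    fn pad16(b: &[u8], is_signed: bool) -> [u8; 16] {
        // Negative little-endian values are extended with 0xFF, everything else with zeros.
        let fill = if is_signed && b.last().map_or(false, |&hi| hi & 0x80 != 0) { 0xFF } else { 0x00 };
        let mut out = [fill; 16];
        out[..b.len()].copy_from_slice(b);
        out
    }

    fn main() {
        // -1i8 is stored as a single 0xFF byte; signed decoding yields -1, unsigned yields 255.
        assert_eq!(i128::from_le_bytes(pad16(&[0xFF], true)), -1);
        assert_eq!(u128::from_le_bytes(pad16(&[0xFF], false)), 255);
    }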
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
index 1ac0837b5..b4915dbf0 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -13,6 +13,15 @@
//! to certain types. To record this, we use the union-find implementation from
//! the `ena` crate, which is extracted from rustc.
+mod cast;
+pub(crate) mod closure;
+mod coerce;
+mod expr;
+mod mutability;
+mod pat;
+mod path;
+pub(crate) mod unify;
+
use std::{convert::identity, ops::Index};
use chalk_ir::{
@@ -60,15 +69,8 @@ pub use coerce::could_coerce;
#[allow(unreachable_pub)]
pub use unify::could_unify;
-pub(crate) use self::closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
-
-pub(crate) mod unify;
-mod path;
-mod expr;
-mod pat;
-mod coerce;
-pub(crate) mod closure;
-mod mutability;
+use cast::CastCheck;
+pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
/// The entry point of type inference.
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
@@ -290,7 +292,7 @@ impl Default for InternedStandardTypes {
/// ```
///
/// Note that for a struct, the 'deep' unsizing of the struct is not recorded.
-/// E.g., `struct Foo<T> { x: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
+/// E.g., `struct Foo<T> { it: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
/// The autoderef and -ref are the same as in the above example, but the type
/// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about
/// the underlying conversions from `[i32; 4]` to `[i32]`.
@@ -508,6 +510,8 @@ pub(crate) struct InferenceContext<'a> {
diverges: Diverges,
breakables: Vec<BreakableContext>,
+ deferred_cast_checks: Vec<CastCheck>,
+
// fields related to closure capture
current_captures: Vec<CapturedItemWithoutTy>,
current_closure: Option<ClosureId>,
@@ -582,7 +586,8 @@ impl<'a> InferenceContext<'a> {
resolver,
diverges: Diverges::Maybe,
breakables: Vec::new(),
- current_captures: vec![],
+ deferred_cast_checks: Vec::new(),
+ current_captures: Vec::new(),
current_closure: None,
deferred_closures: FxHashMap::default(),
closure_dependencies: FxHashMap::default(),
@@ -594,7 +599,7 @@ impl<'a> InferenceContext<'a> {
// used this function for another workaround, mention it here. If you really need this function and believe that
// there is no problem in it being `pub(crate)`, remove this comment.
pub(crate) fn resolve_all(self) -> InferenceResult {
- let InferenceContext { mut table, mut result, .. } = self;
+ let InferenceContext { mut table, mut result, deferred_cast_checks, .. } = self;
// Destructure every single field so whenever new fields are added to `InferenceResult` we
// don't forget to handle them here.
let InferenceResult {
@@ -622,6 +627,13 @@ impl<'a> InferenceContext<'a> {
table.fallback_if_possible();
+ // Comment from rustc:
+ // Even though coercion casts provide type hints, we check casts after fallback for
+ // backwards compatibility. This makes fallback a stronger type hint than a cast coercion.
+ for cast in deferred_cast_checks {
+ cast.check(&mut table);
+ }
+
// FIXME resolve obligations as well (use Guidance if necessary)
table.resolve_obligations_as_possible();
@@ -1172,7 +1184,7 @@ impl<'a> InferenceContext<'a> {
unresolved: Option<usize>,
path: &ModPath,
) -> (Ty, Option<VariantId>) {
- let remaining = unresolved.map(|x| path.segments()[x..].len()).filter(|x| x > &0);
+ let remaining = unresolved.map(|it| path.segments()[it..].len()).filter(|it| it > &0);
match remaining {
None => {
let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id {
@@ -1232,7 +1244,9 @@ impl<'a> InferenceContext<'a> {
.as_function()?
.lookup(self.db.upcast())
.container
- else { return None };
+ else {
+ return None;
+ };
self.resolve_output_on(trait_)
}
@@ -1322,7 +1336,7 @@ impl Expectation {
/// The primary use case is where the expected type is a fat pointer,
/// like `&[isize]`. For example, consider the following statement:
///
- /// let x: &[isize] = &[1, 2, 3];
+ /// let it: &[isize] = &[1, 2, 3];
///
/// In this case, the expected type for the `&[1, 2, 3]` expression is
/// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
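Note: the new `deferred_cast_checks` queue means casts are only checked after integer-variable fallback has run, exactly as the rustc comment quoted in `resolve_all` says: fallback is a stronger hint than a cast. A tiny illustration in ordinary Rust, not taken from the patch:

    fn main() {
        let n = 0;            // nothing else constrains `n`, so integer fallback picks i32
        let _byte = n as u8;  // the cast is checked afterwards as `i32 as u8`; it is not a type hint
    }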
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs
new file mode 100644
index 000000000..9e1c74b16
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs
@@ -0,0 +1,46 @@
+//! Type cast logic. Basically coercion + additional casts.
+
+use crate::{infer::unify::InferenceTable, Interner, Ty, TyExt, TyKind};
+
+#[derive(Clone, Debug)]
+pub(super) struct CastCheck {
+ expr_ty: Ty,
+ cast_ty: Ty,
+}
+
+impl CastCheck {
+ pub(super) fn new(expr_ty: Ty, cast_ty: Ty) -> Self {
+ Self { expr_ty, cast_ty }
+ }
+
+ pub(super) fn check(self, table: &mut InferenceTable<'_>) {
+ // FIXME: This function currently only implements the bits that influence the type
+ // inference. We should return the adjustments on success and report diagnostics on error.
+ let expr_ty = table.resolve_ty_shallow(&self.expr_ty);
+ let cast_ty = table.resolve_ty_shallow(&self.cast_ty);
+
+ if expr_ty.contains_unknown() || cast_ty.contains_unknown() {
+ return;
+ }
+
+ if table.coerce(&expr_ty, &cast_ty).is_ok() {
+ return;
+ }
+
+ if check_ref_to_ptr_cast(expr_ty, cast_ty, table) {
+ // Note that this type of cast is actually split into a coercion to a
+ // pointer type and a cast:
+ // &[T; N] -> *[T; N] -> *T
+ return;
+ }
+
+ // FIXME: Check other kinds of non-coercion casts and report error if any?
+ }
+}
+
+fn check_ref_to_ptr_cast(expr_ty: Ty, cast_ty: Ty, table: &mut InferenceTable<'_>) -> bool {
+ let Some((expr_inner_ty, _, _)) = expr_ty.as_reference() else { return false; };
+ let Some((cast_inner_ty, _)) = cast_ty.as_raw_ptr() else { return false; };
+ let TyKind::Array(expr_elt_ty, _) = expr_inner_ty.kind(Interner) else { return false; };
+ table.coerce(expr_elt_ty, cast_inner_ty).is_ok()
+}
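Note: `check_ref_to_ptr_cast` accepts the `&[T; N] -> *const T` pattern, which is really a coercion to a raw pointer to the array followed by an ordinary pointer cast. Spelled out step by step in plain Rust, for illustration only:

    fn main() {
        let arr = [1u8, 2, 3, 4];
        let array_ptr: *const [u8; 4] = &arr;   // coercion: &[u8; 4] -> *const [u8; 4]
        let elem_ptr = array_ptr as *const u8;  // cast: *const [u8; 4] -> *const u8
        let direct = &arr as *const u8;         // the single-step form users actually write
        assert_eq!(elem_ptr, direct);
    }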
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
index ff64ae252..1781f6c58 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
@@ -139,7 +139,7 @@ impl HirPlace {
) -> CaptureKind {
match current_capture {
CaptureKind::ByRef(BorrowKind::Mut { .. }) => {
- if self.projections[len..].iter().any(|x| *x == ProjectionElem::Deref) {
+ if self.projections[len..].iter().any(|it| *it == ProjectionElem::Deref) {
current_capture = CaptureKind::ByRef(BorrowKind::Unique);
}
}
@@ -199,7 +199,7 @@ impl CapturedItem {
.to_string(),
VariantData::Tuple(fields) => fields
.iter()
- .position(|x| x.0 == f.local_id)
+ .position(|it| it.0 == f.local_id)
.unwrap_or_default()
.to_string(),
VariantData::Unit => "[missing field]".to_string(),
@@ -439,10 +439,10 @@ impl InferenceContext<'_> {
}
fn walk_expr(&mut self, tgt_expr: ExprId) {
- if let Some(x) = self.result.expr_adjustments.get_mut(&tgt_expr) {
+ if let Some(it) = self.result.expr_adjustments.get_mut(&tgt_expr) {
// FIXME: this take is completely unneeded, and is just here to make the borrow checker
// happy. Remove it if you can.
- let x_taken = mem::take(x);
+ let x_taken = mem::take(it);
self.walk_expr_with_adjust(tgt_expr, &x_taken);
*self.result.expr_adjustments.get_mut(&tgt_expr).unwrap() = x_taken;
} else {
@@ -488,10 +488,6 @@ impl InferenceContext<'_> {
self.consume_expr(*tail);
}
}
- Expr::While { condition, body, label: _ } => {
- self.consume_expr(*condition);
- self.consume_expr(*body);
- }
Expr::Call { callee, args, is_assignee_expr: _ } => {
self.consume_expr(*callee);
self.consume_exprs(args.iter().copied());
@@ -536,7 +532,7 @@ impl InferenceContext<'_> {
if let &Some(expr) = spread {
self.consume_expr(expr);
}
- self.consume_exprs(fields.iter().map(|x| x.expr));
+ self.consume_exprs(fields.iter().map(|it| it.expr));
}
Expr::Field { expr, name: _ } => self.select_from_expr(*expr),
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
@@ -548,7 +544,7 @@ impl InferenceContext<'_> {
} else if let Some((f, _)) = self.result.method_resolution(tgt_expr) {
let mutability = 'b: {
if let Some(deref_trait) =
- self.resolve_lang_item(LangItem::DerefMut).and_then(|x| x.as_trait())
+ self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait())
{
if let Some(deref_fn) =
self.db.trait_data(deref_trait).method_by_name(&name![deref_mut])
@@ -615,8 +611,8 @@ impl InferenceContext<'_> {
"We sort closures, so we should always have data for inner closures",
);
let mut cc = mem::take(&mut self.current_captures);
- cc.extend(captures.iter().filter(|x| self.is_upvar(&x.place)).map(|x| {
- CapturedItemWithoutTy { place: x.place.clone(), kind: x.kind, span: x.span }
+ cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| {
+ CapturedItemWithoutTy { place: it.place.clone(), kind: it.kind, span: it.span }
}));
self.current_captures = cc;
}
@@ -694,7 +690,7 @@ impl InferenceContext<'_> {
},
},
}
- if self.result.pat_adjustments.get(&p).map_or(false, |x| !x.is_empty()) {
+ if self.result.pat_adjustments.get(&p).map_or(false, |it| !it.is_empty()) {
for_mut = BorrowKind::Unique;
}
self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut));
@@ -706,9 +702,9 @@ impl InferenceContext<'_> {
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
let mut ty = None;
- if let Some(x) = self.result.expr_adjustments.get(&e) {
- if let Some(x) = x.last() {
- ty = Some(x.target.clone());
+ if let Some(it) = self.result.expr_adjustments.get(&e) {
+ if let Some(it) = it.last() {
+ ty = Some(it.target.clone());
}
}
ty.unwrap_or_else(|| self.expr_ty(e))
@@ -727,7 +723,7 @@ impl InferenceContext<'_> {
// FIXME: We handle closures as a special case, since chalk considers every closure to be copy. We
// should probably let chalk know which closures are copy, but I don't know how to do that
// without creating query cycles.
- return self.result.closure_info.get(id).map(|x| x.1 == FnTrait::Fn).unwrap_or(true);
+ return self.result.closure_info.get(id).map(|it| it.1 == FnTrait::Fn).unwrap_or(true);
}
self.table.resolve_completely(ty).is_copy(self.db, self.owner)
}
@@ -748,7 +744,7 @@ impl InferenceContext<'_> {
}
fn minimize_captures(&mut self) {
- self.current_captures.sort_by_key(|x| x.place.projections.len());
+ self.current_captures.sort_by_key(|it| it.place.projections.len());
let mut hash_map = HashMap::<HirPlace, usize>::new();
let result = mem::take(&mut self.current_captures);
for item in result {
@@ -759,7 +755,7 @@ impl InferenceContext<'_> {
break Some(*k);
}
match it.next() {
- Some(x) => lookup_place.projections.push(x.clone()),
+ Some(it) => lookup_place.projections.push(it.clone()),
None => break None,
}
};
@@ -780,7 +776,7 @@ impl InferenceContext<'_> {
}
fn consume_with_pat(&mut self, mut place: HirPlace, pat: PatId) {
- let cnt = self.result.pat_adjustments.get(&pat).map(|x| x.len()).unwrap_or_default();
+ let cnt = self.result.pat_adjustments.get(&pat).map(|it| it.len()).unwrap_or_default();
place.projections = place
.projections
.iter()
@@ -894,10 +890,10 @@ impl InferenceContext<'_> {
fn closure_kind(&self) -> FnTrait {
let mut r = FnTrait::Fn;
- for x in &self.current_captures {
+ for it in &self.current_captures {
r = cmp::min(
r,
- match &x.kind {
+ match &it.kind {
CaptureKind::ByRef(BorrowKind::Unique | BorrowKind::Mut { .. }) => {
FnTrait::FnMut
}
@@ -933,7 +929,7 @@ impl InferenceContext<'_> {
}
self.minimize_captures();
let result = mem::take(&mut self.current_captures);
- let captures = result.into_iter().map(|x| x.with_ty(self)).collect::<Vec<_>>();
+ let captures = result.into_iter().map(|it| it.with_ty(self)).collect::<Vec<_>>();
self.result.closure_info.insert(closure, (captures, closure_kind));
closure_kind
}
@@ -973,20 +969,20 @@ impl InferenceContext<'_> {
fn sort_closures(&mut self) -> Vec<(ClosureId, Vec<(Ty, Ty, Vec<Ty>, ExprId)>)> {
let mut deferred_closures = mem::take(&mut self.deferred_closures);
let mut dependents_count: FxHashMap<ClosureId, usize> =
- deferred_closures.keys().map(|x| (*x, 0)).collect();
+ deferred_closures.keys().map(|it| (*it, 0)).collect();
for (_, deps) in &self.closure_dependencies {
for dep in deps {
*dependents_count.entry(*dep).or_default() += 1;
}
}
let mut queue: Vec<_> =
- deferred_closures.keys().copied().filter(|x| dependents_count[x] == 0).collect();
+ deferred_closures.keys().copied().filter(|it| dependents_count[it] == 0).collect();
let mut result = vec![];
- while let Some(x) = queue.pop() {
- if let Some(d) = deferred_closures.remove(&x) {
- result.push((x, d));
+ while let Some(it) = queue.pop() {
+ if let Some(d) = deferred_closures.remove(&it) {
+ result.push((it, d));
}
- for dep in self.closure_dependencies.get(&x).into_iter().flat_map(|x| x.iter()) {
+ for dep in self.closure_dependencies.get(&it).into_iter().flat_map(|it| it.iter()) {
let cnt = dependents_count.get_mut(dep).unwrap();
*cnt -= 1;
if *cnt == 0 {
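Note: `sort_closures` orders the deferred closures with a dependency count, Kahn-style: start from the closures nothing depends on and release an entry once all of the entries pointing at it have been handled. A minimal standalone sketch of that ordering, using a hypothetical `topo_order` helper and plain integers instead of `ClosureId`s:

    use std::collections::HashMap;

    fn topo_order(deps: &HashMap<u32, Vec<u32>>, items: &[u32]) -> Vec<u32> {
        // How many items list each item as a dependency.
        let mut dependents: HashMap<u32, usize> = items.iter().map(|&it| (it, 0)).collect();
        for targets in deps.values() {
            for &t in targets {
                *dependents.entry(t).or_default() += 1;
            }
        }
        let mut queue: Vec<u32> = items.iter().copied().filter(|it| dependents[it] == 0).collect();
        let mut result = Vec::new();
        while let Some(it) = queue.pop() {
            result.push(it);
            for &dep in deps.get(&it).into_iter().flatten() {
                let cnt = dependents.get_mut(&dep).unwrap();
                *cnt -= 1;
                if *cnt == 0 {
                    queue.push(dep);
                }
            }
        }
        result
    }

    fn main() {
        let mut deps = HashMap::new();
        deps.insert(1u32, vec![2u32]);
        deps.insert(2, vec![3]);
        // 1 has no dependents, so it is emitted first; 3 becomes ready only after 2.
        assert_eq!(topo_order(&deps, &[1, 2, 3]), vec![1, 2, 3]);
    }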
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
index 05a476f63..8e7e62c49 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
@@ -220,7 +220,7 @@ pub(crate) fn coerce(
Ok((adjustments, table.resolve_with_fallback(ty, &fallback)))
}
-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
/// Unify two types, but may coerce the first one to the second one
/// using "implicit coercion rules" if needed.
pub(super) fn coerce(
@@ -239,7 +239,7 @@ impl<'a> InferenceContext<'a> {
}
}
-impl<'a> InferenceTable<'a> {
+impl InferenceTable<'_> {
/// Unify two types, but may coerce the first one to the second one
/// using "implicit coercion rules" if needed.
pub(crate) fn coerce(
@@ -377,7 +377,7 @@ impl<'a> InferenceTable<'a> {
let snapshot = self.snapshot();
- let mut autoderef = Autoderef::new(self, from_ty.clone());
+ let mut autoderef = Autoderef::new(self, from_ty.clone(), false);
let mut first_error = None;
let mut found = None;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index 194471f00..8cbdae625 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -46,11 +46,11 @@ use crate::{
};
use super::{
- coerce::auto_deref_adjust_steps, find_breakable, BreakableContext, Diverges, Expectation,
- InferenceContext, InferenceDiagnostic, TypeMismatch,
+ cast::CastCheck, coerce::auto_deref_adjust_steps, find_breakable, BreakableContext, Diverges,
+ Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
};
-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
pub(crate) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
let ty = self.infer_expr_inner(tgt_expr, expected);
if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
@@ -198,19 +198,6 @@ impl<'a> InferenceContext<'a> {
None => self.result.standard_types.never.clone(),
}
}
- &Expr::While { condition, body, label } => {
- self.with_breakable_ctx(BreakableKind::Loop, None, label, |this| {
- this.infer_expr(
- condition,
- &Expectation::HasType(this.result.standard_types.bool_.clone()),
- );
- this.infer_expr(body, &Expectation::HasType(TyBuilder::unit()));
- });
-
- // the body may not run, so it diverging doesn't mean we diverge
- self.diverges = Diverges::Maybe;
- TyBuilder::unit()
- }
Expr::Closure { body, args, ret_type, arg_types, closure_kind, capture_by: _ } => {
assert_eq!(args.len(), arg_types.len());
@@ -316,7 +303,7 @@ impl<'a> InferenceContext<'a> {
}
Expr::Call { callee, args, .. } => {
let callee_ty = self.infer_expr(*callee, &Expectation::none());
- let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone());
+ let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false);
let (res, derefed_callee) = 'b: {
// manual loop to be able to access `derefs.table`
while let Some((callee_deref_ty, _)) = derefs.next() {
@@ -574,16 +561,8 @@ impl<'a> InferenceContext<'a> {
}
Expr::Cast { expr, type_ref } => {
let cast_ty = self.make_ty(type_ref);
- // FIXME: propagate the "castable to" expectation
- let inner_ty = self.infer_expr_no_expect(*expr);
- match (inner_ty.kind(Interner), cast_ty.kind(Interner)) {
- (TyKind::Ref(_, _, inner), TyKind::Raw(_, cast)) => {
- // FIXME: record invalid cast diagnostic in case of mismatch
- self.unify(inner, cast);
- }
- // FIXME check the other kinds of cast...
- _ => (),
- }
+ let expr_ty = self.infer_expr(*expr, &Expectation::Castable(cast_ty.clone()));
+ self.deferred_cast_checks.push(CastCheck::new(expr_ty, cast_ty.clone()));
cast_ty
}
Expr::Ref { expr, rawness, mutability } => {
@@ -928,7 +907,7 @@ impl<'a> InferenceContext<'a> {
if let TyKind::Ref(Mutability::Mut, _, inner) = derefed_callee.kind(Interner) {
if adjustments
.last()
- .map(|x| matches!(x.kind, Adjust::Borrow(_)))
+ .map(|it| matches!(it.kind, Adjust::Borrow(_)))
.unwrap_or(true)
{
// prefer reborrow to move
@@ -1385,7 +1364,7 @@ impl<'a> InferenceContext<'a> {
receiver_ty: &Ty,
name: &Name,
) -> Option<(Ty, Option<FieldId>, Vec<Adjustment>, bool)> {
- let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone());
+ let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone(), false);
let mut private_field = None;
let res = autoderef.by_ref().find_map(|(derefed_ty, _)| {
let (field_id, parameters) = match derefed_ty.kind(Interner) {
@@ -1449,6 +1428,13 @@ impl<'a> InferenceContext<'a> {
fn infer_field_access(&mut self, tgt_expr: ExprId, receiver: ExprId, name: &Name) -> Ty {
let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none());
+
+ if name.is_missing() {
+ // Bail out early, don't even try to look up the field. Also, we don't issue an unresolved
+ // field diagnostic because this is a syntax error rather than a semantic error.
+ return self.err_ty();
+ }
+
match self.lookup_field(&receiver_ty, name) {
Some((ty, field_id, adjustments, is_public)) => {
self.write_expr_adj(receiver, adjustments);
@@ -1585,7 +1571,7 @@ impl<'a> InferenceContext<'a> {
output: Ty,
inputs: Vec<Ty>,
) -> Vec<Ty> {
- if let Some(expected_ty) = expected_output.to_option(&mut self.table) {
+ if let Some(expected_ty) = expected_output.only_has_type(&mut self.table) {
self.table.fudge_inference(|table| {
if table.try_unify(&expected_ty, &output).is_ok() {
table.resolve_with_fallback(inputs, &|var, kind, _, _| match kind {
@@ -1658,6 +1644,7 @@ impl<'a> InferenceContext<'a> {
// the parameter to coerce to the expected type (for example in
// `coerce_unsize_expected_type_4`).
let param_ty = self.normalize_associated_types_in(param_ty);
+ let expected_ty = self.normalize_associated_types_in(expected_ty);
let expected = Expectation::rvalue_hint(self, expected_ty);
// infer with the expected type we have...
let ty = self.infer_expr_inner(arg, &expected);
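Note: the callee of `Expr::Call` is resolved by walking `Autoderef` steps (now with an explicit flag controlling raw-pointer derefs), which is what lets a call expression reach something callable through a `Deref` impl. A plain-Rust example of that kind of callee; `Wrapper` and `add_one` are made up for illustration:

    use std::ops::Deref;

    struct Wrapper(fn(i32) -> i32);

    impl Deref for Wrapper {
        type Target = fn(i32) -> i32;
        fn deref(&self) -> &Self::Target { &self.0 }
    }

    fn add_one(x: i32) -> i32 { x + 1 }

    fn main() {
        let w = Wrapper(add_one);
        assert_eq!(w(41), 42); // the callee is deref'd to a fn pointer before the call
    }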
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
index 46f2e1d7d..396ca0044 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
@@ -12,7 +12,7 @@ use crate::{lower::lower_to_chalk_mutability, Adjust, Adjustment, AutoBorrow, Ov
use super::InferenceContext;
-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
pub(crate) fn infer_mut_body(&mut self) {
self.infer_mut_expr(self.body.body_expr, Mutability::Not);
}
@@ -69,16 +69,12 @@ impl<'a> InferenceContext<'a> {
self.infer_mut_expr(*tail, Mutability::Not);
}
}
- &Expr::While { condition: c, body, label: _ } => {
- self.infer_mut_expr(c, Mutability::Not);
- self.infer_mut_expr(body, Mutability::Not);
- }
- Expr::MethodCall { receiver: x, method_name: _, args, generic_args: _ }
- | Expr::Call { callee: x, args, is_assignee_expr: _ } => {
- self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*x)));
+ Expr::MethodCall { receiver: it, method_name: _, args, generic_args: _ }
+ | Expr::Call { callee: it, args, is_assignee_expr: _ } => {
+ self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*it)));
}
Expr::Match { expr, arms } => {
- let m = self.pat_iter_bound_mutability(arms.iter().map(|x| x.pat));
+ let m = self.pat_iter_bound_mutability(arms.iter().map(|it| it.pat));
self.infer_mut_expr(*expr, m);
for arm in arms.iter() {
self.infer_mut_expr(arm.expr, Mutability::Not);
@@ -96,7 +92,7 @@ impl<'a> InferenceContext<'a> {
}
}
Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => {
- self.infer_mut_not_expr_iter(fields.iter().map(|x| x.expr).chain(*spread))
+ self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
}
&Expr::Index { base, index } => {
if mutability == Mutability::Mut {
@@ -204,8 +200,8 @@ impl<'a> InferenceContext<'a> {
}
/// Checks if the pat contains a `ref mut` binding. Such paths make the context of bound expressions
- /// mutable. For example in `let (ref mut x0, ref x1) = *x;` we need to use `DerefMut` for `*x` but in
- /// `let (ref x0, ref x1) = *x;` we should use `Deref`.
+ /// mutable. For example in `let (ref mut x0, ref x1) = *it;` we need to use `DerefMut` for `*it` but in
+ /// `let (ref x0, ref x1) = *it;` we should use `Deref`.
fn pat_bound_mutability(&self, pat: PatId) -> Mutability {
let mut r = Mutability::Not;
self.body.walk_bindings_in_pat(pat, |b| {
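Note: `pat_bound_mutability` exists because a `ref mut` binding anywhere in the pattern forces the scrutinee's dereference through `DerefMut`, while `ref`-only patterns can stay on `Deref`. In ordinary Rust, with `Box` standing in for any `Deref`/`DerefMut` type:

    fn main() {
        let mut boxed = Box::new((1, 2));
        let (ref mut first, ref second) = *boxed; // `*boxed` must be a mutable place here
        *first += *second;
        assert_eq!(*boxed, (3, 2));

        let shared = Box::new((1, 2));
        let (ref a, ref b) = *shared;             // a shared dereference is enough here
        assert_eq!(a + b, 3);
    }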
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
index 2480f8bab..5da0ab76b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
@@ -56,7 +56,7 @@ impl PatLike for PatId {
}
}
-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
/// Infers type for tuple struct pattern or its corresponding assignee expression.
///
/// Ellipses found in the original pattern or expression must be filtered out.
@@ -306,7 +306,7 @@ impl<'a> InferenceContext<'a> {
self.result
.pat_adjustments
.get(&pat)
- .and_then(|x| x.first())
+ .and_then(|it| it.first())
.unwrap_or(&self.result.type_of_pat[pat])
.clone()
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
index e33d8f179..0fb71135b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -22,7 +22,7 @@ use crate::{
TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind,
};
-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
pub(super) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
&mut self,
t: T,
@@ -91,7 +91,7 @@ pub(crate) fn unify(
let mut table = InferenceTable::new(db, env);
let vars = Substitution::from_iter(
Interner,
- tys.binders.iter(Interner).map(|x| match &x.kind {
+ tys.binders.iter(Interner).map(|it| match &it.kind {
chalk_ir::VariableKind::Ty(_) => {
GenericArgData::Ty(table.new_type_var()).intern(Interner)
}
@@ -252,7 +252,8 @@ impl<'a> InferenceTable<'a> {
// and registering an obligation. But it needs chalk support, so we handle the most basic
// case (a non associated const without generic parameters) manually.
if subst.len(Interner) == 0 {
- if let Ok(eval) = self.db.const_eval((*c_id).into(), subst.clone())
+ if let Ok(eval) =
+ self.db.const_eval((*c_id).into(), subst.clone(), None)
{
eval
} else {
@@ -547,7 +548,7 @@ impl<'a> InferenceTable<'a> {
table: &'a mut InferenceTable<'b>,
highest_known_var: InferenceVar,
}
- impl<'a, 'b> TypeFolder<Interner> for VarFudger<'a, 'b> {
+ impl TypeFolder<Interner> for VarFudger<'_, '_> {
fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
self
}
@@ -686,8 +687,8 @@ impl<'a> InferenceTable<'a> {
let mut arg_tys = vec![];
let arg_ty = TyBuilder::tuple(num_args)
- .fill(|x| {
- let arg = match x {
+ .fill(|it| {
+ let arg = match it {
ParamKind::Type => self.new_type_var(),
ParamKind::Const(ty) => {
never!("Tuple with const parameter");
@@ -753,7 +754,7 @@ impl<'a> InferenceTable<'a> {
{
fold_tys_and_consts(
ty,
- |x, _| match x {
+ |it, _| match it {
Either::Left(ty) => Either::Left(self.insert_type_vars_shallow(ty)),
Either::Right(c) => Either::Right(self.insert_const_vars_shallow(c)),
},
@@ -785,7 +786,7 @@ impl<'a> InferenceTable<'a> {
crate::ConstScalar::Unknown => self.new_const_var(data.ty.clone()),
// try to evaluate unevaluated const. Replace with new var if const eval failed.
crate::ConstScalar::UnevaluatedConst(id, subst) => {
- if let Ok(eval) = self.db.const_eval(*id, subst.clone()) {
+ if let Ok(eval) = self.db.const_eval(*id, subst.clone(), None) {
eval
} else {
self.new_const_var(data.ty.clone())
@@ -798,7 +799,7 @@ impl<'a> InferenceTable<'a> {
}
}
-impl<'a> fmt::Debug for InferenceTable<'a> {
+impl fmt::Debug for InferenceTable<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("InferenceTable").field("num_vars", &self.type_variable_table.len()).finish()
}
@@ -826,7 +827,7 @@ mod resolve {
pub(super) var_stack: &'a mut Vec<InferenceVar>,
pub(super) fallback: F,
}
- impl<'a, 'b, F> TypeFolder<Interner> for Resolver<'a, 'b, F>
+ impl<F> TypeFolder<Interner> for Resolver<'_, '_, F>
where
F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
{
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
index 35d3407c1..b15339d44 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
@@ -1,13 +1,12 @@
//! Compute the binary representation of a type
-use base_db::CrateId;
use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy};
use hir_def::{
layout::{
Abi, FieldsShape, Integer, LayoutCalculator, LayoutS, Primitive, ReprOptions, Scalar, Size,
StructKind, TargetDataLayout, WrappingRange,
},
- LocalEnumVariantId, LocalFieldId,
+ LocalEnumVariantId, LocalFieldId, StructId,
};
use la_arena::{Idx, RawIdx};
use stdx::never;
@@ -15,7 +14,7 @@ use triomphe::Arc;
use crate::{
consteval::try_const_usize, db::HirDatabase, infer::normalize, layout::adt::struct_variant_idx,
- utils::ClosureSubst, Interner, Substitution, TraitEnvironment, Ty,
+ utils::ClosureSubst, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
};
pub use self::{
@@ -24,8 +23,8 @@ pub use self::{
};
macro_rules! user_error {
- ($x: expr) => {
- return Err(LayoutError::UserError(format!($x)))
+ ($it: expr) => {
+ return Err(LayoutError::UserError(format!($it)))
};
}
@@ -61,7 +60,6 @@ pub enum LayoutError {
}
struct LayoutCx<'a> {
- krate: CrateId,
target: &'a TargetDataLayout,
}
@@ -77,18 +75,101 @@ impl<'a> LayoutCalculator for LayoutCx<'a> {
}
}
+// FIXME: move this to the `rustc_abi`.
+fn layout_of_simd_ty(
+ db: &dyn HirDatabase,
+ id: StructId,
+ subst: &Substitution,
+ env: Arc<TraitEnvironment>,
+ dl: &TargetDataLayout,
+) -> Result<Arc<Layout>, LayoutError> {
+ let fields = db.field_types(id.into());
+
+ // Supported SIMD vectors are homogeneous ADTs with at least one field:
+ //
+ // * #[repr(simd)] struct S(T, T, T, T);
+ // * #[repr(simd)] struct S { x: T, y: T, z: T, w: T }
+ // * #[repr(simd)] struct S([T; 4])
+ //
+ // where T is a primitive scalar (integer/float/pointer).
+
+ let f0_ty = match fields.iter().next() {
+ Some(it) => it.1.clone().substitute(Interner, subst),
+ None => {
+ user_error!("simd type with zero fields");
+ }
+ };
+
+ // The element type and number of elements of the SIMD vector
+ // are obtained from:
+ //
+ // * the element type and length of the single array field, if
+ // the first field is of array type, or
+ //
+ // * the homogeneous field type and the number of fields.
+ let (e_ty, e_len, is_array) = if let TyKind::Array(e_ty, _) = f0_ty.kind(Interner) {
+ // Extract the number of elements from the layout of the array field:
+ let FieldsShape::Array { count, .. } = db.layout_of_ty(f0_ty.clone(), env.clone())?.fields else {
+ user_error!("Array with non array layout");
+ };
+
+ (e_ty.clone(), count, true)
+ } else {
+ // First ADT field is not an array:
+ (f0_ty, fields.iter().count() as u64, false)
+ };
+
+ // Compute the ABI of the element type:
+ let e_ly = db.layout_of_ty(e_ty, env.clone())?;
+ let Abi::Scalar(e_abi) = e_ly.abi else {
+ user_error!("simd type with inner non scalar type");
+ };
+
+ // Compute the size and alignment of the vector:
+ let size = e_ly.size.checked_mul(e_len, dl).ok_or(LayoutError::SizeOverflow)?;
+ let align = dl.vector_align(size);
+ let size = size.align_to(align.abi);
+
+ // Compute the placement of the vector fields:
+ let fields = if is_array {
+ FieldsShape::Arbitrary { offsets: [Size::ZERO].into(), memory_index: [0].into() }
+ } else {
+ FieldsShape::Array { stride: e_ly.size, count: e_len }
+ };
+
+ Ok(Arc::new(Layout {
+ variants: Variants::Single { index: struct_variant_idx() },
+ fields,
+ abi: Abi::Vector { element: e_abi, count: e_len },
+ largest_niche: e_ly.largest_niche,
+ size,
+ align,
+ }))
+}
+
pub fn layout_of_ty_query(
db: &dyn HirDatabase,
ty: Ty,
- krate: CrateId,
+ trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
- let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) };
- let cx = LayoutCx { krate, target: &target };
+ let krate = trait_env.krate;
+ let Some(target) = db.target_data_layout(krate) else {
+ return Err(LayoutError::TargetLayoutNotAvailable);
+ };
+ let cx = LayoutCx { target: &target };
let dl = &*cx.current_data_layout();
- let trait_env = Arc::new(TraitEnvironment::empty(krate));
- let ty = normalize(db, trait_env, ty.clone());
+ let ty = normalize(db, trait_env.clone(), ty.clone());
let result = match ty.kind(Interner) {
- TyKind::Adt(AdtId(def), subst) => return db.layout_of_adt(*def, subst.clone(), krate),
+ TyKind::Adt(AdtId(def), subst) => {
+ if let hir_def::AdtId::StructId(s) = def {
+ let data = db.struct_data(*s);
+ let repr = data.repr.unwrap_or_default();
+ if repr.simd() {
+ return layout_of_simd_ty(db, *s, subst, trait_env.clone(), &target);
+ }
+ };
+ return db.layout_of_adt(*def, subst.clone(), trait_env.clone());
+ }
TyKind::Scalar(s) => match s {
chalk_ir::Scalar::Bool => Layout::scalar(
dl,
@@ -145,9 +226,9 @@ pub fn layout_of_ty_query(
let fields = tys
.iter(Interner)
- .map(|k| db.layout_of_ty(k.assert_ty_ref(Interner).clone(), krate))
+ .map(|k| db.layout_of_ty(k.assert_ty_ref(Interner).clone(), trait_env.clone()))
.collect::<Result<Vec<_>, _>>()?;
- let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>();
+ let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
let fields = fields.iter().collect::<Vec<_>>();
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
}
@@ -155,7 +236,7 @@ pub fn layout_of_ty_query(
let count = try_const_usize(db, &count).ok_or(LayoutError::UserError(
"unevaluated or mistyped const generic parameter".to_string(),
))? as u64;
- let element = db.layout_of_ty(element.clone(), krate)?;
+ let element = db.layout_of_ty(element.clone(), trait_env.clone())?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
let abi = if count != 0 && matches!(element.abi, Abi::Uninhabited) {
@@ -176,7 +257,7 @@ pub fn layout_of_ty_query(
}
}
TyKind::Slice(element) => {
- let element = db.layout_of_ty(element.clone(), krate)?;
+ let element = db.layout_of_ty(element.clone(), trait_env.clone())?;
Layout {
variants: Variants::Single { index: struct_variant_idx() },
fields: FieldsShape::Array { stride: element.size, count: 0 },
@@ -198,7 +279,15 @@ pub fn layout_of_ty_query(
// return Ok(tcx.mk_layout(LayoutS::scalar(cx, data_ptr)));
// }
- let unsized_part = struct_tail_erasing_lifetimes(db, pointee.clone());
+ let mut unsized_part = struct_tail_erasing_lifetimes(db, pointee.clone());
+ if let TyKind::AssociatedType(id, subst) = unsized_part.kind(Interner) {
+ unsized_part = TyKind::Alias(chalk_ir::AliasTy::Projection(ProjectionTy {
+ associated_ty_id: *id,
+ substitution: subst.clone(),
+ }))
+ .intern(Interner);
+ }
+ unsized_part = normalize(db, trait_env.clone(), unsized_part);
let metadata = match unsized_part.kind(Interner) {
TyKind::Slice(_) | TyKind::Str => {
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
@@ -252,7 +341,7 @@ pub fn layout_of_ty_query(
match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let infer = db.infer(func.into());
- return db.layout_of_ty(infer.type_of_rpit[idx].clone(), krate);
+ return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env.clone());
}
crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
return Err(LayoutError::NotImplemented)
@@ -265,14 +354,14 @@ pub fn layout_of_ty_query(
let (captures, _) = infer.closure_info(c);
let fields = captures
.iter()
- .map(|x| {
+ .map(|it| {
db.layout_of_ty(
- x.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()),
- krate,
+ it.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()),
+ trait_env.clone(),
)
})
.collect::<Result<Vec<_>, _>>()?;
- let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>();
+ let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
let fields = fields.iter().collect::<Vec<_>>();
cx.univariant(dl, &fields, &ReprOptions::default(), StructKind::AlwaysSized)
.ok_or(LayoutError::Unknown)?
@@ -281,8 +370,16 @@ pub fn layout_of_ty_query(
return Err(LayoutError::NotImplemented)
}
TyKind::Error => return Err(LayoutError::HasErrorType),
- TyKind::AssociatedType(_, _)
- | TyKind::Alias(_)
+ TyKind::AssociatedType(id, subst) => {
+ // Try again with `TyKind::Alias` to normalize the associated type.
+ let ty = TyKind::Alias(chalk_ir::AliasTy::Projection(ProjectionTy {
+ associated_ty_id: *id,
+ substitution: subst.clone(),
+ }))
+ .intern(Interner);
+ return db.layout_of_ty(ty, trait_env);
+ }
+ TyKind::Alias(_)
| TyKind::Placeholder(_)
| TyKind::BoundVar(_)
| TyKind::InferenceVar(_, _) => return Err(LayoutError::HasPlaceholder),
@@ -294,7 +391,7 @@ pub fn layout_of_ty_recover(
_: &dyn HirDatabase,
_: &[String],
_: &Ty,
- _: &CrateId,
+ _: &Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
user_error!("infinite sized recursive type");
}
@@ -315,7 +412,10 @@ fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty {
let data = db.struct_data(*i);
let mut it = data.variant_data.fields().iter().rev();
match it.next() {
- Some((f, _)) => field_ty(db, (*i).into(), f, subst),
+ Some((f, _)) => {
+ let last_field_ty = field_ty(db, (*i).into(), f, subst);
+ struct_tail_erasing_lifetimes(db, last_field_ty)
+ }
None => pointee,
}
}
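Note: `layout_of_simd_ty` sizes a vector as element size times lane count and then rounds up to the vector alignment reported by the target. Rough arithmetic for the two-lane `i64` case, assuming an x86_64-like target where a 16-byte vector is 16-aligned (matching the `simd_types` test added further down):

    fn main() {
        let elem_size: u64 = 8;                     // one i64 lane
        let lanes: u64 = 2;
        let unaligned = elem_size * lanes;          // 16 bytes
        let align = unaligned.next_power_of_two();  // 16 on this assumed target
        let size = (unaligned + align - 1) / align * align; // align_to; already a multiple here
        assert_eq!((size, align), (16, 16));
    }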
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
index bd2752a71..1c92e80f3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
@@ -2,7 +2,6 @@
use std::{cmp, ops::Bound};
-use base_db::CrateId;
use hir_def::{
data::adt::VariantData,
layout::{Integer, LayoutCalculator, ReprOptions, TargetDataLayout},
@@ -16,7 +15,7 @@ use crate::{
db::HirDatabase,
lang_items::is_unsafe_cell,
layout::{field_ty, Layout, LayoutError, RustcEnumVariantIdx},
- Substitution,
+ Substitution, TraitEnvironment,
};
use super::LayoutCx;
@@ -29,15 +28,18 @@ pub fn layout_of_adt_query(
db: &dyn HirDatabase,
def: AdtId,
subst: Substitution,
- krate: CrateId,
+ trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
- let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) };
- let cx = LayoutCx { krate, target: &target };
+ let krate = trait_env.krate;
+ let Some(target) = db.target_data_layout(krate) else {
+ return Err(LayoutError::TargetLayoutNotAvailable);
+ };
+ let cx = LayoutCx { target: &target };
let dl = cx.current_data_layout();
let handle_variant = |def: VariantId, var: &VariantData| {
var.fields()
.iter()
- .map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &subst), cx.krate))
+ .map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &subst), trait_env.clone()))
.collect::<Result<Vec<_>, _>>()
};
let (variants, repr) = match def {
@@ -70,9 +72,9 @@ pub fn layout_of_adt_query(
};
let variants = variants
.iter()
- .map(|x| x.iter().map(|x| &**x).collect::<Vec<_>>())
+ .map(|it| it.iter().map(|it| &**it).collect::<Vec<_>>())
.collect::<SmallVec<[_; 1]>>();
- let variants = variants.iter().map(|x| x.iter().collect()).collect();
+ let variants = variants.iter().map(|it| it.iter().collect()).collect();
let result = if matches!(def, AdtId::UnionId(..)) {
cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown)?
} else {
@@ -103,7 +105,7 @@ pub fn layout_of_adt_query(
&& variants
.iter()
.next()
- .and_then(|x| x.last().map(|x| x.is_unsized()))
+ .and_then(|it| it.last().map(|it| !it.is_unsized()))
.unwrap_or(true),
)
.ok_or(LayoutError::SizeOverflow)?
@@ -116,9 +118,9 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
let get = |name| {
let attr = attrs.by_key(name).tt_values();
for tree in attr {
- if let Some(x) = tree.token_trees.first() {
- if let Ok(x) = x.to_string().parse() {
- return Bound::Included(x);
+ if let Some(it) = tree.token_trees.first() {
+ if let Ok(it) = it.to_string().parse() {
+ return Bound::Included(it);
}
}
}
@@ -132,7 +134,7 @@ pub fn layout_of_adt_recover(
_: &[String],
_: &AdtId,
_: &Substitution,
- _: &CrateId,
+ _: &Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
user_error!("infinite sized recursive type");
}
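Note: `layout_scalar_valid_range` feeds the `rustc_layout_scalar_valid_range_start`/`_end` attribute values into ADT layout so niches can be computed. The observable effect is the usual niche optimization, which std guarantees for the `NonZero*` types:

    use core::num::NonZeroU8;

    fn main() {
        // The forbidden zero value serves as the `None` niche, so the Option adds no space.
        assert_eq!(core::mem::size_of::<NonZeroU8>(), 1);
        assert_eq!(core::mem::size_of::<Option<NonZeroU8>>(), 1);
    }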
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
index 0ff8c532d..333ad473a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
@@ -26,7 +26,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
);
let (db, file_ids) = TestDB::with_many_files(&ra_fixture);
- let (adt_or_type_alias_id, module_id) = file_ids
+ let adt_or_type_alias_id = file_ids
.into_iter()
.find_map(|file_id| {
let module_id = db.module_for_file(file_id);
@@ -47,7 +47,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
}
_ => None,
})?;
- Some((adt_or_type_alias_id, module_id))
+ Some(adt_or_type_alias_id)
})
.unwrap();
let goal_ty = match adt_or_type_alias_id {
@@ -58,7 +58,13 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
db.ty(ty_id.into()).substitute(Interner, &Substitution::empty(Interner))
}
};
- db.layout_of_ty(goal_ty, module_id.krate())
+ db.layout_of_ty(
+ goal_ty,
+ db.trait_environment(match adt_or_type_alias_id {
+ Either::Left(adt) => hir_def::GenericDefId::AdtId(adt),
+ Either::Right(ty) => hir_def::GenericDefId::TypeAliasId(ty),
+ }),
+ )
}
/// A version of `eval_goal` for types that can not be expressed in ADTs, like closures and `impl Trait`
@@ -72,7 +78,7 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
let module_id = db.module_for_file(file_id);
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
- let adt_id = scope
+ let function_id = scope
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::FunctionId(x) => {
@@ -82,11 +88,11 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
_ => None,
})
.unwrap();
- let hir_body = db.body(adt_id.into());
+ let hir_body = db.body(function_id.into());
let b = hir_body.bindings.iter().find(|x| x.1.name.to_smol_str() == "goal").unwrap().0;
- let infer = db.infer(adt_id.into());
+ let infer = db.infer(function_id.into());
let goal_ty = infer.type_of_binding[b].clone();
- db.layout_of_ty(goal_ty, module_id.krate())
+ db.layout_of_ty(goal_ty, db.trait_environment(function_id.into()))
}
#[track_caller]
@@ -271,6 +277,20 @@ struct Goal(Foo<S>);
}
#[test]
+fn simd_types() {
+ check_size_and_align(
+ r#"
+ #[repr(simd)]
+ struct SimdType(i64, i64);
+ struct Goal(SimdType);
+ "#,
+ "",
+ 16,
+ 16,
+ );
+}
+
+#[test]
fn return_position_impl_trait() {
size_and_align_expr! {
trait T {}
@@ -344,6 +364,24 @@ fn return_position_impl_trait() {
}
#[test]
+fn unsized_ref() {
+ size_and_align! {
+ struct S1([u8]);
+ struct S2(S1);
+ struct S3(i32, str);
+ struct S4(u64, S3);
+ #[allow(dead_code)]
+ struct S5 {
+ field1: u8,
+ field2: i16,
+ field_last: S4,
+ }
+
+ struct Goal(&'static S1, &'static S2, &'static S3, &'static S4, &'static S5);
+ }
+}
+
+#[test]
fn enums() {
size_and_align! {
enum Goal {
@@ -369,11 +407,11 @@ fn tuple() {
}
#[test]
-fn non_zero() {
+fn non_zero_and_non_null() {
size_and_align! {
- minicore: non_zero, option;
- use core::num::NonZeroU8;
- struct Goal(Option<NonZeroU8>);
+ minicore: non_zero, non_null, option;
+ use core::{num::NonZeroU8, ptr::NonNull};
+ struct Goal(Option<NonZeroU8>, Option<NonNull<i32>>);
}
}
@@ -432,3 +470,41 @@ fn enums_with_discriminants() {
}
}
}
+
+#[test]
+fn core_mem_discriminant() {
+ size_and_align! {
+ minicore: discriminant;
+ struct S(i32, u64);
+ struct Goal(core::mem::Discriminant<S>);
+ }
+ size_and_align! {
+ minicore: discriminant;
+ #[repr(u32)]
+ enum S {
+ A,
+ B,
+ C,
+ }
+ struct Goal(core::mem::Discriminant<S>);
+ }
+ size_and_align! {
+ minicore: discriminant;
+ enum S {
+ A(i32),
+ B(i64),
+ C(u8),
+ }
+ struct Goal(core::mem::Discriminant<S>);
+ }
+ size_and_align! {
+ minicore: discriminant;
+ #[repr(C, u16)]
+ enum S {
+ A(i32),
+ B(i64) = 200,
+ C = 1000,
+ }
+ struct Goal(core::mem::Discriminant<S>);
+ }
+}
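Note: the new `unsized_ref` test relies on references to dynamically sized data being fat pointers: the metadata (a length for slices and `str`) rides along with the data pointer, and a struct whose tail field is unsized gets the same two-word reference on today's targets. A quick check of the slice and `str` cases:

    fn main() {
        assert_eq!(std::mem::size_of::<&[u8]>(), 2 * std::mem::size_of::<usize>());
        assert_eq!(std::mem::size_of::<&str>(), 2 * std::mem::size_of::<usize>());
    }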
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index 1a4d003bf..b3ca2a222 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -180,9 +180,16 @@ impl MemoryMap {
/// allocator function as `f` and it will return a mapping of old addresses to new addresses.
fn transform_addresses(
&self,
- mut f: impl FnMut(&[u8]) -> Result<usize, MirEvalError>,
+ mut f: impl FnMut(&[u8], usize) -> Result<usize, MirEvalError>,
) -> Result<HashMap<usize, usize>, MirEvalError> {
- self.memory.iter().map(|x| Ok((*x.0, f(x.1)?))).collect()
+ self.memory
+ .iter()
+ .map(|x| {
+ let addr = *x.0;
+ let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) };
+ Ok((addr, f(x.1, align)?))
+ })
+ .collect()
}
fn get<'a>(&'a self, addr: usize, size: usize) -> Option<&'a [u8]> {
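Note: `transform_addresses` now passes an alignment alongside each allocation's bytes, derived from the old address with a lowest-set-bit trick: the largest power of two dividing the address, capped at 64, is taken as its alignment. The bit manipulation in isolation, with a hypothetical helper name:

    fn align_from_addr(addr: usize) -> usize {
        // `addr & (addr - 1)` clears the lowest set bit, so the difference is that bit,
        // i.e. the largest power of two dividing `addr`.
        if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) }
    }

    fn main() {
        assert_eq!(align_from_addr(0), 64);
        assert_eq!(align_from_addr(24), 8);    // 24 = 8 * 3
        assert_eq!(align_from_addr(4096), 64); // capped at 64
    }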
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
index 9951a1c75..2837f400b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -23,7 +23,7 @@ use hir_def::{
generics::{
TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
},
- lang_item::{lang_attr, LangItem},
+ lang_item::LangItem,
nameres::MacroSubNs,
path::{GenericArg, GenericArgs, ModPath, Path, PathKind, PathSegment, PathSegments},
resolver::{HasResolver, Resolver, TypeNs},
@@ -959,10 +959,10 @@ impl<'a> TyLoweringContext<'a> {
}
pub(crate) fn lower_where_predicate(
- &'a self,
- where_predicate: &'a WherePredicate,
+ &self,
+ where_predicate: &WherePredicate,
ignore_bindings: bool,
- ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+ ) -> impl Iterator<Item = QuantifiedWhereClause> {
match where_predicate {
WherePredicate::ForLifetime { target, bound, .. }
| WherePredicate::TypeBound { target, bound } => {
@@ -1012,7 +1012,7 @@ impl<'a> TyLoweringContext<'a> {
// (So ideally, we'd only ignore `~const Drop` here)
// - `Destruct` impls are built-in in 1.62 (current nightly as of 08-04-2022), so until
// the builtin impls are supported by Chalk, we ignore them here.
- if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) {
+ if let Some(lang) = self.db.lang_attr(tr.hir_trait_id().into()) {
if matches!(lang, LangItem::Drop | LangItem::Destruct) {
return false;
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
index ab6430e8f..f3a5f69b2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -534,7 +534,7 @@ impl ReceiverAdjustments {
let mut ty = table.resolve_ty_shallow(&ty);
let mut adjust = Vec::new();
for _ in 0..self.autoderefs {
- match autoderef::autoderef_step(table, ty.clone()) {
+ match autoderef::autoderef_step(table, ty.clone(), true) {
None => {
never!("autoderef not possible for {:?}", ty);
ty = TyKind::Error.intern(Interner);
@@ -559,10 +559,10 @@ impl ReceiverAdjustments {
adjust.push(a);
}
if self.unsize_array {
- ty = 'x: {
+ ty = 'it: {
if let TyKind::Ref(m, l, inner) = ty.kind(Interner) {
if let TyKind::Array(inner, _) = inner.kind(Interner) {
- break 'x TyKind::Ref(
+ break 'it TyKind::Ref(
m.clone(),
l.clone(),
TyKind::Slice(inner.clone()).intern(Interner),
@@ -665,13 +665,21 @@ pub fn is_dyn_method(
};
let self_ty = trait_ref.self_type_parameter(Interner);
if let TyKind::Dyn(d) = self_ty.kind(Interner) {
- let is_my_trait_in_bounds =
- d.bounds.skip_binders().as_slice(Interner).iter().any(|x| match x.skip_binders() {
- // rustc doesn't accept `impl Foo<2> for dyn Foo<5>`, so if the trait id is equal, no matter
- // what the generics are, we are sure that the method is come from the vtable.
- WhereClause::Implemented(tr) => tr.trait_id == trait_ref.trait_id,
- _ => false,
- });
+ let is_my_trait_in_bounds = d
+ .bounds
+ .skip_binders()
+ .as_slice(Interner)
+ .iter()
+ .map(|it| it.skip_binders())
+ .flat_map(|it| match it {
+ WhereClause::Implemented(tr) => {
+ all_super_traits(db.upcast(), from_chalk_trait_id(tr.trait_id))
+ }
+ _ => smallvec![],
+ })
+ // rustc doesn't accept `impl Foo<2> for dyn Foo<5>`, so if the trait id is equal, no matter
+ // what the generics are, we are sure that the method comes from the vtable.
+ .any(|x| x == trait_id);
if is_my_trait_in_bounds {
return Some(fn_params);
}
@@ -682,14 +690,14 @@ pub fn is_dyn_method(
/// Looks up the impl method that actually runs for the trait method `func`.
///
/// Returns `func` if it's not a method defined in a trait or the lookup failed.
-pub fn lookup_impl_method(
+pub(crate) fn lookup_impl_method_query(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
func: FunctionId,
fn_subst: Substitution,
) -> (FunctionId, Substitution) {
let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else {
- return (func, fn_subst)
+ return (func, fn_subst);
};
let trait_params = db.generic_params(trait_id.into()).type_or_consts.len();
let fn_params = fn_subst.len(Interner) - trait_params;
@@ -699,8 +707,8 @@ pub fn lookup_impl_method(
};
let name = &db.function_data(func).name;
- let Some((impl_fn, impl_subst)) = lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name)
- .and_then(|assoc| {
+ let Some((impl_fn, impl_subst)) =
+ lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name).and_then(|assoc| {
if let (AssocItemId::FunctionId(id), subst) = assoc {
Some((id, subst))
} else {
@@ -731,7 +739,7 @@ fn lookup_impl_assoc_item_for_trait_ref(
let impls = db.trait_impls_in_deps(env.krate);
let self_impls = match self_ty.kind(Interner) {
TyKind::Adt(id, _) => {
- id.0.module(db.upcast()).containing_block().map(|x| db.trait_impls_in_block(x))
+ id.0.module(db.upcast()).containing_block().map(|it| db.trait_impls_in_block(it))
}
_ => None,
};
@@ -895,8 +903,8 @@ pub fn iterate_method_candidates_dyn(
// (just as rustc does an autoderef and then autoref again).
// We have to be careful about the order we're looking at candidates
- // in here. Consider the case where we're resolving `x.clone()`
- // where `x: &Vec<_>`. This resolves to the clone method with self
+ // in here. Consider the case where we're resolving `it.clone()`
+ // where `it: &Vec<_>`. This resolves to the clone method with self
// type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
// the receiver type exactly matches before cases where we have to
// do autoref. But in the autoderef steps, the `&_` self type comes
@@ -1012,8 +1020,8 @@ fn iterate_method_candidates_by_receiver(
let snapshot = table.snapshot();
// We're looking for methods with *receiver* type receiver_ty. These could
// be found in any of the derefs of receiver_ty, so we have to go through
- // that.
- let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
+ // that, including raw derefs.
+ let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone(), true);
while let Some((self_ty, _)) = autoderef.next() {
iterate_inherent_methods(
&self_ty,
@@ -1028,7 +1036,7 @@ fn iterate_method_candidates_by_receiver(
table.rollback_to(snapshot);
- let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
+ let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone(), true);
while let Some((self_ty, _)) = autoderef.next() {
iterate_trait_method_candidates(
&self_ty,
@@ -1480,8 +1488,8 @@ fn generic_implements_goal(
.push(self_ty.value.clone())
.fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
.build();
- kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|x| {
- let vk = match x.data(Interner) {
+ kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|it| {
+ let vk = match it.data(Interner) {
chalk_ir::GenericArgData::Ty(_) => {
chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
}
@@ -1504,7 +1512,7 @@ fn autoderef_method_receiver(
ty: Ty,
) -> Vec<(Canonical<Ty>, ReceiverAdjustments)> {
let mut deref_chain: Vec<_> = Vec::new();
- let mut autoderef = autoderef::Autoderef::new(table, ty);
+ let mut autoderef = autoderef::Autoderef::new(table, ty, false);
while let Some((ty, derefs)) = autoderef.next() {
deref_chain.push((
autoderef.table.canonicalize(ty).value,
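Note: the comment above about resolving `it.clone()` with `it: &Vec<_>` describes the candidate ordering: exact receiver matches win before autoref kicks in, so the call picks `Vec::clone` through one deref rather than cloning the reference itself. In plain Rust:

    fn main() {
        let v = vec![1, 2, 3];
        let r: &Vec<i32> = &v;
        let cloned = r.clone();   // resolves to Vec::clone, not <&Vec<i32>>::clone
        let _: Vec<i32> = cloned; // so the result is an owned Vec, not another &Vec
    }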
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
index 2345bab0b..4723c25ed 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
@@ -3,9 +3,14 @@
use std::{fmt::Display, iter};
use crate::{
- consteval::usize_const, db::HirDatabase, display::HirDisplay, infer::PointerCast,
- lang_items::is_box, mapping::ToChalk, CallableDefId, ClosureId, Const, ConstScalar,
- InferenceResult, Interner, MemoryMap, Substitution, Ty, TyKind,
+ consteval::usize_const,
+ db::HirDatabase,
+ display::HirDisplay,
+ infer::{normalize, PointerCast},
+ lang_items::is_box,
+ mapping::ToChalk,
+ CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap,
+ Substitution, TraitEnvironment, Ty, TyKind,
};
use base_db::CrateId;
use chalk_ir::Mutability;
@@ -22,7 +27,9 @@ mod pretty;
mod monomorphization;
pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
-pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError, VTableMap};
+pub use eval::{
+ interpret_mir, pad16, render_const_using_debug_impl, Evaluator, MirEvalError, VTableMap,
+};
pub use lower::{
lower_to_mir, mir_body_for_closure_query, mir_body_query, mir_body_recover, MirLowerError,
};
@@ -32,6 +39,7 @@ pub use monomorphization::{
};
use smallvec::{smallvec, SmallVec};
use stdx::{impl_from, never};
+use triomphe::Arc;
use super::consteval::{intern_const_scalar, try_const_usize};
@@ -129,13 +137,21 @@ pub enum ProjectionElem<V, T> {
impl<V, T> ProjectionElem<V, T> {
pub fn projected_ty(
&self,
- base: Ty,
+ mut base: Ty,
db: &dyn HirDatabase,
closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty,
krate: CrateId,
) -> Ty {
+ if matches!(base.kind(Interner), TyKind::Alias(_) | TyKind::AssociatedType(..)) {
+ base = normalize(
+ db,
+ // FIXME: we should get this from caller
+ Arc::new(TraitEnvironment::empty(krate)),
+ base,
+ );
+ }
match self {
- ProjectionElem::Deref => match &base.data(Interner).kind {
+ ProjectionElem::Deref => match &base.kind(Interner) {
TyKind::Raw(_, inner) | TyKind::Ref(_, _, inner) => inner.clone(),
TyKind::Adt(adt, subst) if is_box(db, adt.0) => {
subst.at(Interner, 0).assert_ty_ref(Interner).clone()
@@ -145,7 +161,7 @@ impl<V, T> ProjectionElem<V, T> {
return TyKind::Error.intern(Interner);
}
},
- ProjectionElem::Field(f) => match &base.data(Interner).kind {
+ ProjectionElem::Field(f) => match &base.kind(Interner) {
TyKind::Adt(_, subst) => {
db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst)
}
@@ -154,7 +170,7 @@ impl<V, T> ProjectionElem<V, T> {
return TyKind::Error.intern(Interner);
}
},
- ProjectionElem::TupleOrClosureField(f) => match &base.data(Interner).kind {
+ ProjectionElem::TupleOrClosureField(f) => match &base.kind(Interner) {
TyKind::Tuple(_, subst) => subst
.as_slice(Interner)
.get(*f)
@@ -171,7 +187,7 @@ impl<V, T> ProjectionElem<V, T> {
}
},
ProjectionElem::ConstantIndex { .. } | ProjectionElem::Index(_) => {
- match &base.data(Interner).kind {
+ match &base.kind(Interner) {
TyKind::Array(inner, _) | TyKind::Slice(inner) => inner.clone(),
_ => {
never!("Overloaded index is not a projection");
@@ -179,7 +195,7 @@ impl<V, T> ProjectionElem<V, T> {
}
}
}
- &ProjectionElem::Subslice { from, to } => match &base.data(Interner).kind {
+ &ProjectionElem::Subslice { from, to } => match &base.kind(Interner) {
TyKind::Array(inner, c) => {
let next_c = usize_const(
db,
@@ -218,6 +234,7 @@ impl Place {
self.local == child.local && child.projection.starts_with(&self.projection)
}
+ /// The place itself is not included
fn iterate_over_parents(&self) -> impl Iterator<Item = Place> + '_ {
(0..self.projection.len())
.map(|x| &self.projection[0..x])
@@ -321,8 +338,8 @@ impl SwitchTargets {
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Terminator {
- span: MirSpan,
- kind: TerminatorKind,
+ pub span: MirSpan,
+ pub kind: TerminatorKind,
}
#[derive(Debug, PartialEq, Eq, Clone)]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
index a5dd0182e..ad98e8fa1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
@@ -52,7 +52,7 @@ fn all_mir_bodies(
let closures = body.closures.clone();
Box::new(
iter::once(Ok(body))
- .chain(closures.into_iter().flat_map(|x| for_closure(db, x))),
+ .chain(closures.into_iter().flat_map(|it| for_closure(db, it))),
)
}
Err(e) => Box::new(iter::once(Err(e))),
@@ -62,7 +62,7 @@ fn all_mir_bodies(
Ok(body) => {
let closures = body.closures.clone();
Box::new(
- iter::once(Ok(body)).chain(closures.into_iter().flat_map(|x| for_closure(db, x))),
+ iter::once(Ok(body)).chain(closures.into_iter().flat_map(|it| for_closure(db, it))),
)
}
Err(e) => Box::new(iter::once(Err(e))),
@@ -171,7 +171,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
}
TerminatorKind::Call { func, args, .. } => {
for_operand(func, terminator.span);
- args.iter().for_each(|x| for_operand(x, terminator.span));
+ args.iter().for_each(|it| for_operand(it, terminator.span));
}
TerminatorKind::Assert { cond, .. } => {
for_operand(cond, terminator.span);
@@ -245,7 +245,7 @@ fn ever_initialized_map(
body: &MirBody,
) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> =
- body.basic_blocks.iter().map(|x| (x.0, ArenaMap::default())).collect();
+ body.basic_blocks.iter().map(|it| (it.0, ArenaMap::default())).collect();
fn dfs(
db: &dyn HirDatabase,
body: &MirBody,
@@ -271,7 +271,10 @@ fn ever_initialized_map(
}
}
let Some(terminator) = &block.terminator else {
- never!("Terminator should be none only in construction.\nThe body:\n{}", body.pretty_print(db));
+ never!(
+ "Terminator should be none only in construction.\nThe body:\n{}",
+ body.pretty_print(db)
+ );
return;
};
let targets = match &terminator.kind {
@@ -311,7 +314,7 @@ fn ever_initialized_map(
result[body.start_block].insert(l, true);
dfs(db, body, body.start_block, l, &mut result);
}
- for l in body.locals.iter().map(|x| x.0) {
+ for l in body.locals.iter().map(|it| it.0) {
if !result[body.start_block].contains_idx(l) {
result[body.start_block].insert(l, false);
dfs(db, body, body.start_block, l, &mut result);
@@ -325,10 +328,10 @@ fn mutability_of_locals(
body: &MirBody,
) -> ArenaMap<LocalId, MutabilityReason> {
let mut result: ArenaMap<LocalId, MutabilityReason> =
- body.locals.iter().map(|x| (x.0, MutabilityReason::Not)).collect();
+ body.locals.iter().map(|it| (it.0, MutabilityReason::Not)).collect();
let mut push_mut_span = |local, span| match &mut result[local] {
MutabilityReason::Mut { spans } => spans.push(span),
- x @ MutabilityReason::Not => *x = MutabilityReason::Mut { spans: vec![span] },
+ it @ MutabilityReason::Not => *it = MutabilityReason::Mut { spans: vec![span] },
};
let ever_init_maps = ever_initialized_map(db, body);
for (block_id, mut ever_init_map) in ever_init_maps.into_iter() {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
index 9acf9d39e..9e30eed56 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
@@ -1,6 +1,13 @@
//! This module provides a MIR interpreter, which is used in const eval.
-use std::{borrow::Cow, collections::HashMap, fmt::Write, iter, ops::Range};
+use std::{
+ borrow::Cow,
+ cell::RefCell,
+ collections::{HashMap, HashSet},
+ fmt::Write,
+ iter, mem,
+ ops::Range,
+};
use base_db::{CrateId, FileId};
use chalk_ir::Mutability;
@@ -8,12 +15,13 @@ use either::Either;
use hir_def::{
builtin_type::BuiltinType,
data::adt::{StructFlags, VariantData},
- lang_item::{lang_attr, LangItem},
+ lang_item::LangItem,
layout::{TagEncoding, Variants},
- AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId,
- VariantId,
+ resolver::{HasResolver, TypeNs, ValueNs},
+ AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
+ StaticId, VariantId,
};
-use hir_expand::InFile;
+use hir_expand::{mod_path::ModPath, InFile};
use intern::Interned;
use la_arena::ArenaMap;
use rustc_hash::{FxHashMap, FxHashSet};
@@ -28,7 +36,7 @@ use crate::{
infer::PointerCast,
layout::{Layout, LayoutError, RustcEnumVariantIdx},
mapping::from_chalk,
- method_resolution::{is_dyn_method, lookup_impl_method},
+ method_resolution::{is_dyn_method, lookup_impl_const},
name, static_lifetime,
traits::FnTrait,
utils::{detect_variant_from_bytes, ClosureSubst},
@@ -37,8 +45,9 @@ use crate::{
};
use super::{
- return_slot, AggregateKind, BinOp, CastKind, LocalId, MirBody, MirLowerError, MirSpan, Operand,
- Place, ProjectionElem, Rvalue, StatementKind, TerminatorKind, UnOp,
+ return_slot, AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError,
+ MirSpan, Operand, Place, PlaceElem, ProjectionElem, Rvalue, StatementKind, TerminatorKind,
+ UnOp,
};
mod shim;
@@ -48,15 +57,15 @@ mod tests;
macro_rules! from_bytes {
($ty:tt, $value:expr) => {
($ty::from_le_bytes(match ($value).try_into() {
- Ok(x) => x,
+ Ok(it) => it,
Err(_) => return Err(MirEvalError::TypeError(stringify!(mismatched size in constructing $ty))),
}))
};
}
macro_rules! not_supported {
- ($x: expr) => {
- return Err(MirEvalError::NotSupported(format!($x)))
+ ($it: expr) => {
+ return Err(MirEvalError::NotSupported(format!($it)))
};
}
@@ -67,18 +76,22 @@ pub struct VTableMap {
}
impl VTableMap {
+ const OFFSET: usize = 1000; // Offset ids so that 0 (null) is never a valid id.
+
fn id(&mut self, ty: Ty) -> usize {
- if let Some(x) = self.ty_to_id.get(&ty) {
- return *x;
+ if let Some(it) = self.ty_to_id.get(&ty) {
+ return *it;
}
- let id = self.id_to_ty.len();
+ let id = self.id_to_ty.len() + VTableMap::OFFSET;
self.id_to_ty.push(ty.clone());
self.ty_to_id.insert(ty, id);
id
}
pub(crate) fn ty(&self, id: usize) -> Result<&Ty> {
- self.id_to_ty.get(id).ok_or(MirEvalError::InvalidVTableId(id))
+ id.checked_sub(VTableMap::OFFSET)
+ .and_then(|id| self.id_to_ty.get(id))
+ .ok_or(MirEvalError::InvalidVTableId(id))
}
fn ty_of_bytes(&self, bytes: &[u8]) -> Result<&Ty> {
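The new `OFFSET` shifts every vtable id by 1000 so that 0 (a null pointer read back from memory) can never name a type, and `ty` rejects anything below the offset via `checked_sub`. A minimal standalone sketch of the scheme (hypothetical helper names, not the real `VTableMap` API):

    // Hypothetical sketch of the offset-based id scheme added to VTableMap.
    const OFFSET: usize = 1000;

    fn encode(slot: usize) -> usize {
        slot + OFFSET // slot 0 becomes id 1000, so a null id can never be valid
    }

    fn decode(id: usize) -> Option<usize> {
        id.checked_sub(OFFSET) // ids below OFFSET, including 0, are rejected
    }

    fn main() {
        assert_eq!(decode(encode(0)), Some(0));
        assert_eq!(decode(0), None); // a zeroed pointer no longer decodes to the first type
    }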
@@ -114,11 +127,25 @@ impl TlsData {
}
}
+struct StackFrame {
+ locals: Locals,
+ destination: Option<BasicBlockId>,
+ prev_stack_ptr: usize,
+ span: (MirSpan, DefWithBodyId),
+}
+
+#[derive(Clone)]
+enum MirOrDynIndex {
+ Mir(Arc<MirBody>),
+ Dyn(usize),
+}
+
pub struct Evaluator<'a> {
db: &'a dyn HirDatabase,
trait_env: Arc<TraitEnvironment>,
stack: Vec<u8>,
heap: Vec<u8>,
+ code_stack: Vec<StackFrame>,
/// Stores the global location of the statics. We const evaluate every static first time we need it
/// and see it's missing, then we add it to this to reuse.
static_locations: FxHashMap<StaticId, Address>,
@@ -127,8 +154,21 @@ pub struct Evaluator<'a> {
/// time of use.
vtable_map: VTableMap,
thread_local_storage: TlsData,
+ random_state: oorandom::Rand64,
stdout: Vec<u8>,
stderr: Vec<u8>,
+ layout_cache: RefCell<FxHashMap<Ty, Arc<Layout>>>,
+ projected_ty_cache: RefCell<FxHashMap<(Ty, PlaceElem), Ty>>,
+ not_special_fn_cache: RefCell<FxHashSet<FunctionId>>,
+ mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, Substitution), MirOrDynIndex>>,
+ /// Constantly dropping and creating `Locals` is very costly. We store
+ /// old locals that we normaly want to drop here, to reuse their allocations
+ /// later.
+ unused_locals_store: RefCell<FxHashMap<DefWithBodyId, Vec<Locals>>>,
+ cached_ptr_size: usize,
+ cached_fn_trait_func: Option<FunctionId>,
+ cached_fn_mut_trait_func: Option<FunctionId>,
+ cached_fn_once_trait_func: Option<FunctionId>,
crate_id: CrateId,
// FIXME: This is a workaround, see the comment on `interpret_mir`
assert_placeholder_ty_is_unused: bool,
@@ -136,6 +176,8 @@ pub struct Evaluator<'a> {
execution_limit: usize,
/// An additional limit on stack depth, to prevent stack overflow
stack_depth_limit: usize,
+ /// Maximum number of bytes that the heap and stack can grow to
+ memory_limit: usize,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -192,7 +234,7 @@ impl IntervalAndTy {
addr: Address,
ty: Ty,
evaluator: &Evaluator<'_>,
- locals: &Locals<'_>,
+ locals: &Locals,
) -> Result<IntervalAndTy> {
let size = evaluator.size_of_sized(&ty, locals, "type of interval")?;
Ok(IntervalAndTy { interval: Interval { addr, size }, ty })
@@ -226,18 +268,28 @@ impl IntervalOrOwned {
}
}
+#[cfg(target_pointer_width = "64")]
+const STACK_OFFSET: usize = 1 << 60;
+#[cfg(target_pointer_width = "64")]
+const HEAP_OFFSET: usize = 1 << 59;
+
+#[cfg(target_pointer_width = "32")]
+const STACK_OFFSET: usize = 1 << 30;
+#[cfg(target_pointer_width = "32")]
+const HEAP_OFFSET: usize = 1 << 29;
+
impl Address {
- fn from_bytes(x: &[u8]) -> Result<Self> {
- Ok(Address::from_usize(from_bytes!(usize, x)))
+ fn from_bytes(it: &[u8]) -> Result<Self> {
+ Ok(Address::from_usize(from_bytes!(usize, it)))
}
- fn from_usize(x: usize) -> Self {
- if x > usize::MAX / 2 {
- Stack(x - usize::MAX / 2)
- } else if x > usize::MAX / 4 {
- Heap(x - usize::MAX / 4)
+ fn from_usize(it: usize) -> Self {
+ if it > STACK_OFFSET {
+ Stack(it - STACK_OFFSET)
+ } else if it > HEAP_OFFSET {
+ Heap(it - HEAP_OFFSET)
} else {
- Invalid(x)
+ Invalid(it)
}
}
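The stack and heap halves of the interpreter's fake address space are now split at fixed power-of-two offsets chosen per host pointer width, instead of at fractions of `usize::MAX`. A rough round-trip check of the encoding, assuming the 64-bit constants above (standalone sketch, not the real `Address` type):

    // Standalone sketch of the tagged-address encoding used by the evaluator.
    const STACK_OFFSET: usize = 1 << 60;
    const HEAP_OFFSET: usize = 1 << 59;

    #[derive(Debug, PartialEq)]
    enum Addr { Stack(usize), Heap(usize), Invalid(usize) }

    fn to_usize(a: &Addr) -> usize {
        match a {
            Addr::Stack(it) => *it + STACK_OFFSET,
            Addr::Heap(it) => *it + HEAP_OFFSET,
            Addr::Invalid(it) => *it,
        }
    }

    fn from_usize(it: usize) -> Addr {
        if it > STACK_OFFSET {
            Addr::Stack(it - STACK_OFFSET) // stack addresses sit above 1 << 60
        } else if it > HEAP_OFFSET {
            Addr::Heap(it - HEAP_OFFSET)   // heap addresses sit between 1 << 59 and 1 << 60
        } else {
            Addr::Invalid(it)              // anything lower, including 0, is invalid
        }
    }

    fn main() {
        // A non-zero stack offset encodes above 1 << 60 and decodes back unchanged.
        assert_eq!(from_usize(to_usize(&Addr::Stack(4))), Addr::Stack(4));
    }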
@@ -247,23 +299,23 @@ impl Address {
fn to_usize(&self) -> usize {
let as_num = match self {
- Stack(x) => *x + usize::MAX / 2,
- Heap(x) => *x + usize::MAX / 4,
- Invalid(x) => *x,
+ Stack(it) => *it + STACK_OFFSET,
+ Heap(it) => *it + HEAP_OFFSET,
+ Invalid(it) => *it,
};
as_num
}
fn map(&self, f: impl FnOnce(usize) -> usize) -> Address {
match self {
- Stack(x) => Stack(f(*x)),
- Heap(x) => Heap(f(*x)),
- Invalid(x) => Invalid(f(*x)),
+ Stack(it) => Stack(f(*it)),
+ Heap(it) => Heap(f(*it)),
+ Invalid(it) => Invalid(f(*it)),
}
}
fn offset(&self, offset: usize) -> Address {
- self.map(|x| x + offset)
+ self.map(|it| it + offset)
}
}
@@ -282,13 +334,14 @@ pub enum MirEvalError {
TypeIsUnsized(Ty, &'static str),
NotSupported(String),
InvalidConst(Const),
- InFunction(Either<FunctionId, ClosureId>, Box<MirEvalError>, MirSpan, DefWithBodyId),
+ InFunction(Box<MirEvalError>, Vec<(Either<FunctionId, ClosureId>, MirSpan, DefWithBodyId)>),
ExecutionLimitExceeded,
StackOverflow,
TargetDataLayoutNotAvailable,
InvalidVTableId(usize),
CoerceUnsizedError(Ty),
LangItemNotFound(LangItem),
+ BrokenLayout(Layout),
}
impl MirEvalError {
@@ -300,40 +353,42 @@ impl MirEvalError {
) -> std::result::Result<(), std::fmt::Error> {
writeln!(f, "Mir eval error:")?;
let mut err = self;
- while let MirEvalError::InFunction(func, e, span, def) = err {
+ while let MirEvalError::InFunction(e, stack) = err {
err = e;
- match func {
- Either::Left(func) => {
- let function_name = db.function_data(*func);
- writeln!(
- f,
- "In function {} ({:?})",
- function_name.name.display(db.upcast()),
- func
- )?;
- }
- Either::Right(clos) => {
- writeln!(f, "In {:?}", clos)?;
+ for (func, span, def) in stack.iter().take(30).rev() {
+ match func {
+ Either::Left(func) => {
+ let function_name = db.function_data(*func);
+ writeln!(
+ f,
+ "In function {} ({:?})",
+ function_name.name.display(db.upcast()),
+ func
+ )?;
+ }
+ Either::Right(clos) => {
+ writeln!(f, "In {:?}", clos)?;
+ }
}
+ let source_map = db.body_with_source_map(*def).1;
+ let span: InFile<SyntaxNodePtr> = match span {
+ MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {
+ Ok(s) => s.map(|it| it.into()),
+ Err(_) => continue,
+ },
+ MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
+ Ok(s) => s.map(|it| match it {
+ Either::Left(e) => e.into(),
+ Either::Right(e) => e.into(),
+ }),
+ Err(_) => continue,
+ },
+ MirSpan::Unknown => continue,
+ };
+ let file_id = span.file_id.original_file(db.upcast());
+ let text_range = span.value.text_range();
+ writeln!(f, "{}", span_formatter(file_id, text_range))?;
}
- let source_map = db.body_with_source_map(*def).1;
- let span: InFile<SyntaxNodePtr> = match span {
- MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {
- Ok(s) => s.map(|x| x.into()),
- Err(_) => continue,
- },
- MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
- Ok(s) => s.map(|x| match x {
- Either::Left(e) => e.into(),
- Either::Right(e) => e.into(),
- }),
- Err(_) => continue,
- },
- MirSpan::Unknown => continue,
- };
- let file_id = span.file_id.original_file(db.upcast());
- let text_range = span.value.text_range();
- writeln!(f, "{}", span_formatter(file_id, text_range))?;
}
match err {
MirEvalError::InFunction(..) => unreachable!(),
@@ -373,6 +428,7 @@ impl MirEvalError {
| MirEvalError::TargetDataLayoutNotAvailable
| MirEvalError::CoerceUnsizedError(_)
| MirEvalError::LangItemNotFound(_)
+ | MirEvalError::BrokenLayout(_)
| MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?,
}
Ok(())
@@ -407,19 +463,14 @@ impl std::fmt::Debug for MirEvalError {
Self::CoerceUnsizedError(arg0) => {
f.debug_tuple("CoerceUnsizedError").field(arg0).finish()
}
+ Self::BrokenLayout(arg0) => f.debug_tuple("BrokenLayout").field(arg0).finish(),
Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(),
Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(),
Self::InvalidConst(arg0) => {
let data = &arg0.data(Interner);
f.debug_struct("InvalidConst").field("ty", &data.ty).field("value", &arg0).finish()
}
- Self::InFunction(func, e, span, _) => {
- let mut e = &**e;
- let mut stack = vec![(*func, *span)];
- while let Self::InFunction(f, next_e, span, _) = e {
- e = &next_e;
- stack.push((*f, *span));
- }
+ Self::InFunction(e, stack) => {
f.debug_struct("WithStack").field("error", e).field("stack", &stack).finish()
}
}
@@ -435,85 +486,126 @@ struct DropFlags {
impl DropFlags {
fn add_place(&mut self, p: Place) {
- if p.iterate_over_parents().any(|x| self.need_drop.contains(&x)) {
+ if p.iterate_over_parents().any(|it| self.need_drop.contains(&it)) {
return;
}
- self.need_drop.retain(|x| !p.is_parent(x));
+ self.need_drop.retain(|it| !p.is_parent(it));
self.need_drop.insert(p);
}
fn remove_place(&mut self, p: &Place) -> bool {
// FIXME: replace parents with parts
+ if let Some(parent) = p.iterate_over_parents().find(|it| self.need_drop.contains(&it)) {
+ self.need_drop.remove(&parent);
+ return true;
+ }
self.need_drop.remove(p)
}
+
+ fn clear(&mut self) {
+ self.need_drop.clear();
+ }
}
#[derive(Debug)]
-struct Locals<'a> {
- ptr: &'a ArenaMap<LocalId, Interval>,
- body: &'a MirBody,
+struct Locals {
+ ptr: ArenaMap<LocalId, Interval>,
+ body: Arc<MirBody>,
drop_flags: DropFlags,
}
pub fn interpret_mir(
db: &dyn HirDatabase,
- body: &MirBody,
+ body: Arc<MirBody>,
// FIXME: This is workaround. Ideally, const generics should have a separate body (issue #7434), but now
// they share their body with their parent, so in MIR lowering we have locals of the parent body, which
// might have placeholders. With this argument, we (wrongly) assume that every placeholder type has
// a zero size, hoping that they are all outside of our current body. Even without a fix for #7434, we can
// (and probably should) do better here, for example by excluding bindings outside of the target expression.
assert_placeholder_ty_is_unused: bool,
+ trait_env: Option<Arc<TraitEnvironment>>,
) -> (Result<Const>, String, String) {
let ty = body.locals[return_slot()].ty.clone();
- let mut evaluator = Evaluator::new(db, body, assert_placeholder_ty_is_unused);
- let x: Result<Const> = (|| {
- let bytes = evaluator.interpret_mir(&body, None.into_iter())?;
+ let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env);
+ let it: Result<Const> = (|| {
+ if evaluator.ptr_size() != std::mem::size_of::<usize>() {
+ not_supported!("targets with different pointer size from host");
+ }
+ let bytes = evaluator.interpret_mir(body.clone(), None.into_iter())?;
let mut memory_map = evaluator.create_memory_map(
&bytes,
&ty,
- &Locals { ptr: &ArenaMap::new(), body: &body, drop_flags: DropFlags::default() },
+ &Locals { ptr: ArenaMap::new(), body, drop_flags: DropFlags::default() },
)?;
memory_map.vtable = evaluator.vtable_map.clone();
return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty));
})();
(
- x,
+ it,
String::from_utf8_lossy(&evaluator.stdout).into_owned(),
String::from_utf8_lossy(&evaluator.stderr).into_owned(),
)
}
+#[cfg(test)]
+const EXECUTION_LIMIT: usize = 100_000;
+#[cfg(not(test))]
+const EXECUTION_LIMIT: usize = 10_000_000;
+
impl Evaluator<'_> {
pub fn new<'a>(
db: &'a dyn HirDatabase,
- body: &MirBody,
+ owner: DefWithBodyId,
assert_placeholder_ty_is_unused: bool,
+ trait_env: Option<Arc<TraitEnvironment>>,
) -> Evaluator<'a> {
- let crate_id = body.owner.module(db.upcast()).krate();
- let trait_env = db.trait_environment_for_body(body.owner);
+ let crate_id = owner.module(db.upcast()).krate();
Evaluator {
stack: vec![0],
heap: vec![0],
+ code_stack: vec![],
vtable_map: VTableMap::default(),
thread_local_storage: TlsData::default(),
static_locations: HashMap::default(),
db,
- trait_env,
+ random_state: oorandom::Rand64::new(0),
+ trait_env: trait_env.unwrap_or_else(|| db.trait_environment_for_body(owner)),
crate_id,
stdout: vec![],
stderr: vec![],
assert_placeholder_ty_is_unused,
stack_depth_limit: 100,
- execution_limit: 1000_000,
+ execution_limit: EXECUTION_LIMIT,
+ memory_limit: 1000_000_000, // 1GB for the stack and 1GB for the heap, 2GB in total
+ layout_cache: RefCell::new(HashMap::default()),
+ projected_ty_cache: RefCell::new(HashMap::default()),
+ not_special_fn_cache: RefCell::new(HashSet::default()),
+ mir_or_dyn_index_cache: RefCell::new(HashMap::default()),
+ unused_locals_store: RefCell::new(HashMap::default()),
+ cached_ptr_size: match db.target_data_layout(crate_id) {
+ Some(it) => it.pointer_size.bytes_usize(),
+ None => 8,
+ },
+ cached_fn_trait_func: db
+ .lang_item(crate_id, LangItem::Fn)
+ .and_then(|x| x.as_trait())
+ .and_then(|x| db.trait_data(x).method_by_name(&name![call])),
+ cached_fn_mut_trait_func: db
+ .lang_item(crate_id, LangItem::FnMut)
+ .and_then(|x| x.as_trait())
+ .and_then(|x| db.trait_data(x).method_by_name(&name![call_mut])),
+ cached_fn_once_trait_func: db
+ .lang_item(crate_id, LangItem::FnOnce)
+ .and_then(|x| x.as_trait())
+ .and_then(|x| db.trait_data(x).method_by_name(&name![call_once])),
}
}
- fn place_addr(&self, p: &Place, locals: &Locals<'_>) -> Result<Address> {
+ fn place_addr(&self, p: &Place, locals: &Locals) -> Result<Address> {
Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0)
}
- fn place_interval(&self, p: &Place, locals: &Locals<'_>) -> Result<Interval> {
+ fn place_interval(&self, p: &Place, locals: &Locals) -> Result<Interval> {
let place_addr_and_ty = self.place_addr_and_ty_and_metadata(p, locals)?;
Ok(Interval {
addr: place_addr_and_ty.0,
@@ -526,39 +618,47 @@ impl Evaluator<'_> {
}
fn ptr_size(&self) -> usize {
- match self.db.target_data_layout(self.crate_id) {
- Some(x) => x.pointer_size.bytes_usize(),
- None => 8,
+ self.cached_ptr_size
+ }
+
+ fn projected_ty(&self, ty: Ty, proj: PlaceElem) -> Ty {
+ let pair = (ty, proj);
+ if let Some(r) = self.projected_ty_cache.borrow().get(&pair) {
+ return r.clone();
}
+ let (ty, proj) = pair;
+ let r = proj.projected_ty(
+ ty.clone(),
+ self.db,
+ |c, subst, f| {
+ let (def, _) = self.db.lookup_intern_closure(c.into());
+ let infer = self.db.infer(def);
+ let (captures, _) = infer.closure_info(&c);
+ let parent_subst = ClosureSubst(subst).parent_subst();
+ captures
+ .get(f)
+ .expect("broken closure field")
+ .ty
+ .clone()
+ .substitute(Interner, parent_subst)
+ },
+ self.crate_id,
+ );
+ self.projected_ty_cache.borrow_mut().insert((ty, proj), r.clone());
+ r
}
fn place_addr_and_ty_and_metadata<'a>(
&'a self,
p: &Place,
- locals: &'a Locals<'a>,
+ locals: &'a Locals,
) -> Result<(Address, Ty, Option<IntervalOrOwned>)> {
let mut addr = locals.ptr[p.local].addr;
let mut ty: Ty = locals.body.locals[p.local].ty.clone();
let mut metadata: Option<IntervalOrOwned> = None; // locals are always sized
for proj in &*p.projection {
let prev_ty = ty.clone();
- ty = proj.projected_ty(
- ty,
- self.db,
- |c, subst, f| {
- let (def, _) = self.db.lookup_intern_closure(c.into());
- let infer = self.db.infer(def);
- let (captures, _) = infer.closure_info(&c);
- let parent_subst = ClosureSubst(subst).parent_subst();
- captures
- .get(f)
- .expect("broken closure field")
- .ty
- .clone()
- .substitute(Interner, parent_subst)
- },
- self.crate_id,
- );
+ ty = self.projected_ty(ty, proj.clone());
match proj {
ProjectionElem::Deref => {
metadata = if self.size_align_of(&ty, locals)?.is_none() {
@@ -569,8 +669,8 @@ impl Evaluator<'_> {
} else {
None
};
- let x = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?);
- addr = Address::from_usize(x);
+ let it = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?);
+ addr = Address::from_usize(it);
}
ProjectionElem::Index(op) => {
let offset = from_bytes!(
@@ -586,13 +686,13 @@ impl Evaluator<'_> {
let offset = if from_end {
let len = match prev_ty.kind(Interner) {
TyKind::Array(_, c) => match try_const_usize(self.db, c) {
- Some(x) => x as u64,
+ Some(it) => it as u64,
None => {
not_supported!("indexing array with unknown const from end")
}
},
TyKind::Slice(_) => match metadata {
- Some(x) => from_bytes!(u64, x.get(self)?),
+ Some(it) => from_bytes!(u64, it.get(self)?),
None => not_supported!("slice place without metadata"),
},
_ => not_supported!("bad type for const index"),
@@ -607,13 +707,13 @@ impl Evaluator<'_> {
addr = addr.offset(ty_size * offset);
}
&ProjectionElem::Subslice { from, to } => {
- let inner_ty = match &ty.data(Interner).kind {
+ let inner_ty = match &ty.kind(Interner) {
TyKind::Array(inner, _) | TyKind::Slice(inner) => inner.clone(),
_ => TyKind::Error.intern(Interner),
};
metadata = match metadata {
- Some(x) => {
- let prev_len = from_bytes!(u64, x.get(self)?);
+ Some(it) => {
+ let prev_len = from_bytes!(u64, it.get(self)?);
Some(IntervalOrOwned::Owned(
(prev_len - from - to).to_le_bytes().to_vec(),
))
@@ -636,8 +736,8 @@ impl Evaluator<'_> {
Variants::Single { .. } => &layout,
Variants::Multiple { variants, .. } => {
&variants[match f.parent {
- hir_def::VariantId::EnumVariantId(x) => {
- RustcEnumVariantIdx(x.local_id)
+ hir_def::VariantId::EnumVariantId(it) => {
+ RustcEnumVariantIdx(it.local_id)
}
_ => {
return Err(MirEvalError::TypeError(
@@ -652,8 +752,10 @@ impl Evaluator<'_> {
.offset(u32::from(f.local_id.into_raw()) as usize)
.bytes_usize();
addr = addr.offset(offset);
- // FIXME: support structs with unsized fields
- metadata = None;
+ // The metadata of an unsized field is the same as the metadata of the struct itself
+ if self.size_align_of(&ty, locals)?.is_some() {
+ metadata = None;
+ }
}
ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"),
}
@@ -662,22 +764,26 @@ impl Evaluator<'_> {
}
fn layout(&self, ty: &Ty) -> Result<Arc<Layout>> {
- self.db
- .layout_of_ty(ty.clone(), self.crate_id)
- .map_err(|e| MirEvalError::LayoutError(e, ty.clone()))
+ if let Some(x) = self.layout_cache.borrow().get(ty) {
+ return Ok(x.clone());
+ }
+ let r = self
+ .db
+ .layout_of_ty(ty.clone(), self.trait_env.clone())
+ .map_err(|e| MirEvalError::LayoutError(e, ty.clone()))?;
+ self.layout_cache.borrow_mut().insert(ty.clone(), r.clone());
+ Ok(r)
}
fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result<Arc<Layout>> {
- self.db.layout_of_adt(adt, subst.clone(), self.crate_id).map_err(|e| {
- MirEvalError::LayoutError(e, TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner))
- })
+ self.layout(&TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner))
}
- fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals<'a>) -> Result<Ty> {
+ fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals) -> Result<Ty> {
Ok(self.place_addr_and_ty_and_metadata(p, locals)?.1)
}
- fn operand_ty(&self, o: &Operand, locals: &Locals<'_>) -> Result<Ty> {
+ fn operand_ty(&self, o: &Operand, locals: &Locals) -> Result<Ty> {
Ok(match o {
Operand::Copy(p) | Operand::Move(p) => self.place_ty(p, locals)?,
Operand::Constant(c) => c.data(Interner).ty.clone(),
@@ -688,11 +794,7 @@ impl Evaluator<'_> {
})
}
- fn operand_ty_and_eval(
- &mut self,
- o: &Operand,
- locals: &mut Locals<'_>,
- ) -> Result<IntervalAndTy> {
+ fn operand_ty_and_eval(&mut self, o: &Operand, locals: &mut Locals) -> Result<IntervalAndTy> {
Ok(IntervalAndTy {
interval: self.eval_operand(o, locals)?,
ty: self.operand_ty(o, locals)?,
@@ -701,39 +803,178 @@ impl Evaluator<'_> {
fn interpret_mir(
&mut self,
- body: &MirBody,
- args: impl Iterator<Item = Vec<u8>>,
+ body: Arc<MirBody>,
+ args: impl Iterator<Item = IntervalOrOwned>,
) -> Result<Vec<u8>> {
- if let Some(x) = self.stack_depth_limit.checked_sub(1) {
- self.stack_depth_limit = x;
+ if let Some(it) = self.stack_depth_limit.checked_sub(1) {
+ self.stack_depth_limit = it;
} else {
return Err(MirEvalError::StackOverflow);
}
let mut current_block_idx = body.start_block;
- let mut locals =
- Locals { ptr: &ArenaMap::new(), body: &body, drop_flags: DropFlags::default() };
- let (locals_ptr, stack_size) = {
- let mut stack_ptr = self.stack.len();
- let addr = body
- .locals
- .iter()
- .map(|(id, x)| {
- let size =
- self.size_of_sized(&x.ty, &locals, "no unsized local in extending stack")?;
- let my_ptr = stack_ptr;
- stack_ptr += size;
- Ok((id, Interval { addr: Stack(my_ptr), size }))
- })
- .collect::<Result<ArenaMap<LocalId, _>>>()?;
- let stack_size = stack_ptr - self.stack.len();
- (addr, stack_size)
- };
- locals.ptr = &locals_ptr;
- self.stack.extend(iter::repeat(0).take(stack_size));
+ let (mut locals, prev_stack_ptr) = self.create_locals_for_body(&body, None)?;
+ self.fill_locals_for_body(&body, &mut locals, args)?;
+ let prev_code_stack = mem::take(&mut self.code_stack);
+ let span = (MirSpan::Unknown, body.owner);
+ self.code_stack.push(StackFrame { locals, destination: None, prev_stack_ptr, span });
+ 'stack: loop {
+ let Some(mut my_stack_frame) = self.code_stack.pop() else {
+ not_supported!("missing stack frame");
+ };
+ let e = (|| {
+ let mut locals = &mut my_stack_frame.locals;
+ let body = locals.body.clone();
+ loop {
+ let current_block = &body.basic_blocks[current_block_idx];
+ if let Some(it) = self.execution_limit.checked_sub(1) {
+ self.execution_limit = it;
+ } else {
+ return Err(MirEvalError::ExecutionLimitExceeded);
+ }
+ for statement in &current_block.statements {
+ match &statement.kind {
+ StatementKind::Assign(l, r) => {
+ let addr = self.place_addr(l, &locals)?;
+ let result = self.eval_rvalue(r, &mut locals)?.to_vec(&self)?;
+ self.write_memory(addr, &result)?;
+ locals.drop_flags.add_place(l.clone());
+ }
+ StatementKind::Deinit(_) => not_supported!("de-init statement"),
+ StatementKind::StorageLive(_)
+ | StatementKind::StorageDead(_)
+ | StatementKind::Nop => (),
+ }
+ }
+ let Some(terminator) = current_block.terminator.as_ref() else {
+ not_supported!("block without terminator");
+ };
+ match &terminator.kind {
+ TerminatorKind::Goto { target } => {
+ current_block_idx = *target;
+ }
+ TerminatorKind::Call {
+ func,
+ args,
+ destination,
+ target,
+ cleanup: _,
+ from_hir_call: _,
+ } => {
+ let destination_interval = self.place_interval(destination, &locals)?;
+ let fn_ty = self.operand_ty(func, &locals)?;
+ let args = args
+ .iter()
+ .map(|it| self.operand_ty_and_eval(it, &mut locals))
+ .collect::<Result<Vec<_>>>()?;
+ let stack_frame = match &fn_ty.kind(Interner) {
+ TyKind::Function(_) => {
+ let bytes = self.eval_operand(func, &mut locals)?;
+ self.exec_fn_pointer(
+ bytes,
+ destination_interval,
+ &args,
+ &locals,
+ *target,
+ terminator.span,
+ )?
+ }
+ TyKind::FnDef(def, generic_args) => self.exec_fn_def(
+ *def,
+ generic_args,
+ destination_interval,
+ &args,
+ &locals,
+ *target,
+ terminator.span,
+ )?,
+ it => not_supported!("unknown function type {it:?}"),
+ };
+ locals.drop_flags.add_place(destination.clone());
+ if let Some(stack_frame) = stack_frame {
+ self.code_stack.push(my_stack_frame);
+ current_block_idx = stack_frame.locals.body.start_block;
+ self.code_stack.push(stack_frame);
+ return Ok(None);
+ } else {
+ current_block_idx =
+ target.ok_or(MirEvalError::UndefinedBehavior(
+ "Diverging function returned".to_owned(),
+ ))?;
+ }
+ }
+ TerminatorKind::SwitchInt { discr, targets } => {
+ let val = u128::from_le_bytes(pad16(
+ self.eval_operand(discr, &mut locals)?.get(&self)?,
+ false,
+ ));
+ current_block_idx = targets.target_for_value(val);
+ }
+ TerminatorKind::Return => {
+ break;
+ }
+ TerminatorKind::Unreachable => {
+ return Err(MirEvalError::UndefinedBehavior(
+ "unreachable executed".to_owned(),
+ ));
+ }
+ TerminatorKind::Drop { place, target, unwind: _ } => {
+ self.drop_place(place, &mut locals, terminator.span)?;
+ current_block_idx = *target;
+ }
+ _ => not_supported!("unknown terminator"),
+ }
+ }
+ Ok(Some(my_stack_frame))
+ })();
+ let my_stack_frame = match e {
+ Ok(None) => continue 'stack,
+ Ok(Some(x)) => x,
+ Err(e) => {
+ let my_code_stack = mem::replace(&mut self.code_stack, prev_code_stack);
+ let mut error_stack = vec![];
+ for frame in my_code_stack.into_iter().rev() {
+ if let DefWithBodyId::FunctionId(f) = frame.locals.body.owner {
+ error_stack.push((Either::Left(f), frame.span.0, frame.span.1));
+ }
+ }
+ return Err(MirEvalError::InFunction(Box::new(e), error_stack));
+ }
+ };
+ let return_interval = my_stack_frame.locals.ptr[return_slot()];
+ self.unused_locals_store
+ .borrow_mut()
+ .entry(my_stack_frame.locals.body.owner)
+ .or_default()
+ .push(my_stack_frame.locals);
+ match my_stack_frame.destination {
+ None => {
+ self.code_stack = prev_code_stack;
+ self.stack_depth_limit += 1;
+ return Ok(return_interval.get(self)?.to_vec());
+ }
+ Some(bb) => {
+ // We don't support const promotion, so we can't truncate the stack yet.
+ let _ = my_stack_frame.prev_stack_ptr;
+ // self.stack.truncate(my_stack_frame.prev_stack_ptr);
+ current_block_idx = bb;
+ }
+ }
+ }
+ }
+
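`interpret_mir` no longer calls itself for every MIR-level function call: callers are pushed onto `code_stack` as `StackFrame`s and the outer `'stack` loop resumes them when the callee finishes, so deep interpreted call chains no longer consume host stack. The same recursion-to-explicit-stack rewrite on a toy example (purely illustrative; it shares nothing with the real `StackFrame` fields):

    // Toy analogy: compute n! with an explicit frame stack instead of recursion.
    fn factorial(n: u64) -> u64 {
        let mut frames = vec![n]; // pending "callees"
        let mut result = 1;
        while let Some(frame) = frames.pop() {
            if frame > 1 {
                result *= frame;        // work done in the current frame
                frames.push(frame - 1); // "call" the next frame iteratively
            }
        }
        result
    }

    fn main() {
        assert_eq!(factorial(5), 120); // host stack depth stays constant
    }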
+ fn fill_locals_for_body(
+ &mut self,
+ body: &MirBody,
+ locals: &mut Locals,
+ args: impl Iterator<Item = IntervalOrOwned>,
+ ) -> Result<()> {
let mut remain_args = body.param_locals.len();
- for ((l, interval), value) in locals_ptr.iter().skip(1).zip(args) {
+ for ((l, interval), value) in locals.ptr.iter().skip(1).zip(args) {
locals.drop_flags.add_place(l.into());
- interval.write_from_bytes(self, &value)?;
+ match value {
+ IntervalOrOwned::Owned(value) => interval.write_from_bytes(self, &value)?,
+ IntervalOrOwned::Borrowed(value) => interval.write_from_interval(self, value)?,
+ }
if remain_args == 0 {
return Err(MirEvalError::TypeError("more arguments provided"));
}
@@ -742,101 +983,64 @@ impl Evaluator<'_> {
if remain_args > 0 {
return Err(MirEvalError::TypeError("not enough arguments provided"));
}
- loop {
- let current_block = &body.basic_blocks[current_block_idx];
- if let Some(x) = self.execution_limit.checked_sub(1) {
- self.execution_limit = x;
- } else {
- return Err(MirEvalError::ExecutionLimitExceeded);
- }
- for statement in &current_block.statements {
- match &statement.kind {
- StatementKind::Assign(l, r) => {
- let addr = self.place_addr(l, &locals)?;
- let result = self.eval_rvalue(r, &mut locals)?.to_vec(&self)?;
- self.write_memory(addr, &result)?;
- locals.drop_flags.add_place(l.clone());
- }
- StatementKind::Deinit(_) => not_supported!("de-init statement"),
- StatementKind::StorageLive(_)
- | StatementKind::StorageDead(_)
- | StatementKind::Nop => (),
+ Ok(())
+ }
+
+ fn create_locals_for_body(
+ &mut self,
+ body: &Arc<MirBody>,
+ destination: Option<Interval>,
+ ) -> Result<(Locals, usize)> {
+ let mut locals =
+ match self.unused_locals_store.borrow_mut().entry(body.owner).or_default().pop() {
+ None => Locals {
+ ptr: ArenaMap::new(),
+ body: body.clone(),
+ drop_flags: DropFlags::default(),
+ },
+ Some(mut l) => {
+ l.drop_flags.clear();
+ l.body = body.clone();
+ l
}
- }
- let Some(terminator) = current_block.terminator.as_ref() else {
- not_supported!("block without terminator");
};
- match &terminator.kind {
- TerminatorKind::Goto { target } => {
- current_block_idx = *target;
- }
- TerminatorKind::Call {
- func,
- args,
- destination,
- target,
- cleanup: _,
- from_hir_call: _,
- } => {
- let destination_interval = self.place_interval(destination, &locals)?;
- let fn_ty = self.operand_ty(func, &locals)?;
- let args = args
- .iter()
- .map(|x| self.operand_ty_and_eval(x, &mut locals))
- .collect::<Result<Vec<_>>>()?;
- match &fn_ty.data(Interner).kind {
- TyKind::Function(_) => {
- let bytes = self.eval_operand(func, &mut locals)?;
- self.exec_fn_pointer(
- bytes,
- destination_interval,
- &args,
- &locals,
- terminator.span,
- )?;
- }
- TyKind::FnDef(def, generic_args) => {
- self.exec_fn_def(
- *def,
- generic_args,
- destination_interval,
- &args,
- &locals,
- terminator.span,
- )?;
- }
- x => not_supported!("unknown function type {x:?}"),
+ let stack_size = {
+ let mut stack_ptr = self.stack.len();
+ for (id, it) in body.locals.iter() {
+ if id == return_slot() {
+ if let Some(destination) = destination {
+ locals.ptr.insert(id, destination);
+ continue;
}
- locals.drop_flags.add_place(destination.clone());
- current_block_idx = target.expect("broken mir, function without target");
- }
- TerminatorKind::SwitchInt { discr, targets } => {
- let val = u128::from_le_bytes(pad16(
- self.eval_operand(discr, &mut locals)?.get(&self)?,
- false,
- ));
- current_block_idx = targets.target_for_value(val);
- }
- TerminatorKind::Return => {
- self.stack_depth_limit += 1;
- return Ok(locals.ptr[return_slot()].get(self)?.to_vec());
}
- TerminatorKind::Unreachable => {
- return Err(MirEvalError::UndefinedBehavior("unreachable executed".to_owned()));
- }
- TerminatorKind::Drop { place, target, unwind: _ } => {
- self.drop_place(place, &mut locals, terminator.span)?;
- current_block_idx = *target;
+ let (size, align) = self.size_align_of_sized(
+ &it.ty,
+ &locals,
+ "no unsized local in extending stack",
+ )?;
+ while stack_ptr % align != 0 {
+ stack_ptr += 1;
}
- _ => not_supported!("unknown terminator"),
+ let my_ptr = stack_ptr;
+ stack_ptr += size;
+ locals.ptr.insert(id, Interval { addr: Stack(my_ptr), size });
}
+ stack_ptr - self.stack.len()
+ };
+ let prev_stack_pointer = self.stack.len();
+ if stack_size > self.memory_limit {
+ return Err(MirEvalError::Panic(format!(
+ "Stack overflow. Tried to grow stack to {stack_size} bytes"
+ )));
}
+ self.stack.extend(iter::repeat(0).take(stack_size));
+ Ok((locals, prev_stack_pointer))
}
- fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals<'_>) -> Result<IntervalOrOwned> {
+ fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals) -> Result<IntervalOrOwned> {
use IntervalOrOwned::*;
Ok(match r {
- Rvalue::Use(x) => Borrowed(self.eval_operand(x, locals)?),
+ Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?),
Rvalue::Ref(_, p) => {
let (addr, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;
let mut r = addr.to_bytes();
@@ -881,9 +1085,9 @@ impl Evaluator<'_> {
c[0] = 1 - c[0];
} else {
match op {
- UnOp::Not => c.iter_mut().for_each(|x| *x = !*x),
+ UnOp::Not => c.iter_mut().for_each(|it| *it = !*it),
UnOp::Neg => {
- c.iter_mut().for_each(|x| *x = !*x);
+ c.iter_mut().for_each(|it| *it = !*it);
for k in c.iter_mut() {
let o;
(*k, o) = k.overflowing_add(1);
@@ -948,8 +1152,8 @@ impl Evaluator<'_> {
};
Owned(r.to_le_bytes().into())
}
- x => not_supported!(
- "invalid binop {x:?} on floating point operators"
+ it => not_supported!(
+ "invalid binop {it:?} on floating point operators"
),
}
}
@@ -976,8 +1180,8 @@ impl Evaluator<'_> {
};
Owned(r.to_le_bytes().into())
}
- x => not_supported!(
- "invalid binop {x:?} on floating point operators"
+ it => not_supported!(
+ "invalid binop {it:?} on floating point operators"
),
}
}
@@ -1034,13 +1238,18 @@ impl Evaluator<'_> {
BinOp::Shr => l128.checked_shr(shift_amount),
_ => unreachable!(),
};
+ if shift_amount as usize >= lc.len() * 8 {
+ return Err(MirEvalError::Panic(format!(
+ "Overflow in {op:?}"
+ )));
+ }
if let Some(r) = r {
break 'b r;
}
};
return Err(MirEvalError::Panic(format!("Overflow in {op:?}")));
};
- check_overflow(r)?
+ Owned(r.to_le_bytes()[..lc.len()].to_vec())
}
BinOp::Offset => not_supported!("offset binop"),
}
@@ -1049,64 +1258,15 @@ impl Evaluator<'_> {
Rvalue::Discriminant(p) => {
let ty = self.place_ty(p, locals)?;
let bytes = self.eval_place(p, locals)?.get(&self)?;
- let layout = self.layout(&ty)?;
- let enum_id = 'b: {
- match ty.kind(Interner) {
- TyKind::Adt(e, _) => match e.0 {
- AdtId::EnumId(e) => break 'b e,
- _ => (),
- },
- _ => (),
- }
- return Ok(Owned(0u128.to_le_bytes().to_vec()));
- };
- match &layout.variants {
- Variants::Single { index } => {
- let r = self.const_eval_discriminant(EnumVariantId {
- parent: enum_id,
- local_id: index.0,
- })?;
- Owned(r.to_le_bytes().to_vec())
- }
- Variants::Multiple { tag, tag_encoding, variants, .. } => {
- let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else {
- not_supported!("missing target data layout");
- };
- let size = tag.size(&*target_data_layout).bytes_usize();
- let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
- match tag_encoding {
- TagEncoding::Direct => {
- let tag = &bytes[offset..offset + size];
- Owned(pad16(tag, false).to_vec())
- }
- TagEncoding::Niche { untagged_variant, niche_start, .. } => {
- let tag = &bytes[offset..offset + size];
- let candidate_tag = i128::from_le_bytes(pad16(tag, false))
- .wrapping_sub(*niche_start as i128)
- as usize;
- let variant = variants
- .iter_enumerated()
- .map(|(x, _)| x)
- .filter(|x| x != untagged_variant)
- .nth(candidate_tag)
- .unwrap_or(*untagged_variant)
- .0;
- let result = self.const_eval_discriminant(EnumVariantId {
- parent: enum_id,
- local_id: variant,
- })?;
- Owned(result.to_le_bytes().to_vec())
- }
- }
- }
- }
+ let result = self.compute_discriminant(ty, bytes)?;
+ Owned(result.to_le_bytes().to_vec())
}
- Rvalue::Repeat(x, len) => {
+ Rvalue::Repeat(it, len) => {
let len = match try_const_usize(self.db, &len) {
- Some(x) => x as usize,
+ Some(it) => it as usize,
None => not_supported!("non evaluatable array len in repeat Rvalue"),
};
- let val = self.eval_operand(x, locals)?.get(self)?;
+ let val = self.eval_operand(it, locals)?.get(self)?;
let size = len * val.len();
Owned(val.iter().copied().cycle().take(size).collect())
}
@@ -1115,20 +1275,20 @@ impl Evaluator<'_> {
let Some((size, align)) = self.size_align_of(ty, locals)? else {
not_supported!("unsized box initialization");
};
- let addr = self.heap_allocate(size, align);
+ let addr = self.heap_allocate(size, align)?;
Owned(addr.to_bytes())
}
Rvalue::CopyForDeref(_) => not_supported!("copy for deref"),
Rvalue::Aggregate(kind, values) => {
let values = values
.iter()
- .map(|x| self.eval_operand(x, locals))
+ .map(|it| self.eval_operand(it, locals))
.collect::<Result<Vec<_>>>()?;
match kind {
AggregateKind::Array(_) => {
let mut r = vec![];
- for x in values {
- let value = x.get(&self)?;
+ for it in values {
+ let value = it.get(&self)?;
r.extend(value);
}
Owned(r)
@@ -1139,11 +1299,12 @@ impl Evaluator<'_> {
layout.size.bytes_usize(),
&layout,
None,
- values.iter().map(|&x| x.into()),
+ values.iter().map(|&it| it.into()),
)?)
}
- AggregateKind::Union(x, f) => {
- let layout = self.layout_adt((*x).into(), Substitution::empty(Interner))?;
+ AggregateKind::Union(it, f) => {
+ let layout =
+ self.layout_adt((*it).into(), Substitution::empty(Interner))?;
let offset = layout
.fields
.offset(u32::from(f.local_id.into_raw()) as usize)
@@ -1153,14 +1314,14 @@ impl Evaluator<'_> {
result[offset..offset + op.len()].copy_from_slice(op);
Owned(result)
}
- AggregateKind::Adt(x, subst) => {
+ AggregateKind::Adt(it, subst) => {
let (size, variant_layout, tag) =
- self.layout_of_variant(*x, subst.clone(), locals)?;
+ self.layout_of_variant(*it, subst.clone(), locals)?;
Owned(self.make_by_layout(
size,
&variant_layout,
tag,
- values.iter().map(|&x| x.into()),
+ values.iter().map(|&it| it.into()),
)?)
}
AggregateKind::Closure(ty) => {
@@ -1169,7 +1330,7 @@ impl Evaluator<'_> {
layout.size.bytes_usize(),
&layout,
None,
- values.iter().map(|&x| x.into()),
+ values.iter().map(|&it| it.into()),
)?)
}
}
@@ -1179,7 +1340,7 @@ impl Evaluator<'_> {
PointerCast::ReifyFnPointer | PointerCast::ClosureFnPointer(_) => {
let current_ty = self.operand_ty(operand, locals)?;
if let TyKind::FnDef(_, _) | TyKind::Closure(_, _) =
- &current_ty.data(Interner).kind
+ &current_ty.kind(Interner)
{
let id = self.vtable_map.id(current_ty);
let ptr_size = self.ptr_size();
@@ -1229,21 +1390,75 @@ impl Evaluator<'_> {
})
}
+ fn compute_discriminant(&self, ty: Ty, bytes: &[u8]) -> Result<i128> {
+ let layout = self.layout(&ty)?;
+ let enum_id = 'b: {
+ match ty.kind(Interner) {
+ TyKind::Adt(e, _) => match e.0 {
+ AdtId::EnumId(e) => break 'b e,
+ _ => (),
+ },
+ _ => (),
+ }
+ return Ok(0);
+ };
+ match &layout.variants {
+ Variants::Single { index } => {
+ let r = self.const_eval_discriminant(EnumVariantId {
+ parent: enum_id,
+ local_id: index.0,
+ })?;
+ Ok(r)
+ }
+ Variants::Multiple { tag, tag_encoding, variants, .. } => {
+ let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else {
+ not_supported!("missing target data layout");
+ };
+ let size = tag.size(&*target_data_layout).bytes_usize();
+ let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
+ match tag_encoding {
+ TagEncoding::Direct => {
+ let tag = &bytes[offset..offset + size];
+ Ok(i128::from_le_bytes(pad16(tag, false)))
+ }
+ TagEncoding::Niche { untagged_variant, niche_start, .. } => {
+ let tag = &bytes[offset..offset + size];
+ let candidate_tag = i128::from_le_bytes(pad16(tag, false))
+ .wrapping_sub(*niche_start as i128)
+ as usize;
+ let variant = variants
+ .iter_enumerated()
+ .map(|(it, _)| it)
+ .filter(|it| it != untagged_variant)
+ .nth(candidate_tag)
+ .unwrap_or(*untagged_variant)
+ .0;
+ let result = self.const_eval_discriminant(EnumVariantId {
+ parent: enum_id,
+ local_id: variant,
+ })?;
+ Ok(result)
+ }
+ }
+ }
+ }
+ }
+
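For `TagEncoding::Niche`, the stored tag is the variant's position among the non-untagged variants plus `niche_start` (see the encoding side in `layout_of_variant` below); `compute_discriminant` inverts that by subtracting `niche_start` and indexing the filtered variant list, falling back to the untagged variant when the tag is out of range. A worked example with assumed toy values (not taken from any real layout):

    fn main() {
        // Assumed toy values mirroring the TagEncoding::Niche arithmetic above:
        // three variants, index 0 is the untagged one, and the niche starts at 2.
        let variants = ["A (untagged)", "B", "C"];
        let untagged = 0usize;
        let niche_start: i128 = 2;
        let tag: i128 = 3; // value read from the tag field in memory
        let candidate_tag = tag.wrapping_sub(niche_start) as usize;
        let variant = (0..variants.len())
            .filter(|&i| i != untagged)
            .nth(candidate_tag)
            .unwrap_or(untagged);
        assert_eq!(variants[variant], "C"); // tag 3 - niche_start 2 = 1 -> second non-untagged variant
    }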
fn coerce_unsized_look_through_fields<T>(
&self,
ty: &Ty,
goal: impl Fn(&TyKind) -> Option<T>,
) -> Result<T> {
let kind = ty.kind(Interner);
- if let Some(x) = goal(kind) {
- return Ok(x);
+ if let Some(it) = goal(kind) {
+ return Ok(it);
}
if let TyKind::Adt(id, subst) = kind {
if let AdtId::StructId(struct_id) = id.0 {
let field_types = self.db.field_types(struct_id.into());
let mut field_types = field_types.iter();
if let Some(ty) =
- field_types.next().map(|x| x.1.clone().substitute(Interner, subst))
+ field_types.next().map(|it| it.1.clone().substitute(Interner, subst))
{
return self.coerce_unsized_look_through_fields(&ty, goal);
}
@@ -1258,66 +1473,99 @@ impl Evaluator<'_> {
current_ty: &Ty,
target_ty: &Ty,
) -> Result<IntervalOrOwned> {
- use IntervalOrOwned::*;
- fn for_ptr(x: &TyKind) -> Option<Ty> {
- match x {
+ fn for_ptr(it: &TyKind) -> Option<Ty> {
+ match it {
TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => Some(ty.clone()),
_ => None,
}
}
- Ok(match self.coerce_unsized_look_through_fields(target_ty, for_ptr)? {
- ty => match &ty.data(Interner).kind {
- TyKind::Slice(_) => {
- match self.coerce_unsized_look_through_fields(current_ty, for_ptr)? {
- ty => match &ty.data(Interner).kind {
- TyKind::Array(_, size) => {
- let len = match try_const_usize(self.db, size) {
- None => not_supported!(
- "unevaluatble len of array in coerce unsized"
- ),
- Some(x) => x as usize,
- };
- let mut r = Vec::with_capacity(16);
- let addr = addr.get(self)?;
- r.extend(addr.iter().copied());
- r.extend(len.to_le_bytes().into_iter());
- Owned(r)
- }
- t => {
- not_supported!("slice unsizing from non array type {t:?}")
- }
- },
- }
+ let target_ty = self.coerce_unsized_look_through_fields(target_ty, for_ptr)?;
+ let current_ty = self.coerce_unsized_look_through_fields(current_ty, for_ptr)?;
+
+ self.unsizing_ptr_from_addr(target_ty, current_ty, addr)
+ }
+
+ /// Adds metadata to the address and creates the fat pointer result of the unsizing operation.
+ fn unsizing_ptr_from_addr(
+ &mut self,
+ target_ty: Ty,
+ current_ty: Ty,
+ addr: Interval,
+ ) -> Result<IntervalOrOwned> {
+ use IntervalOrOwned::*;
+ Ok(match &target_ty.kind(Interner) {
+ TyKind::Slice(_) => match &current_ty.kind(Interner) {
+ TyKind::Array(_, size) => {
+ let len = match try_const_usize(self.db, size) {
+ None => {
+ not_supported!("unevaluatable len of array in coerce unsized")
+ }
+ Some(it) => it as usize,
+ };
+ let mut r = Vec::with_capacity(16);
+ let addr = addr.get(self)?;
+ r.extend(addr.iter().copied());
+ r.extend(len.to_le_bytes().into_iter());
+ Owned(r)
}
- TyKind::Dyn(_) => match &current_ty.data(Interner).kind {
- TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => {
- let vtable = self.vtable_map.id(ty.clone());
- let mut r = Vec::with_capacity(16);
- let addr = addr.get(self)?;
- r.extend(addr.iter().copied());
- r.extend(vtable.to_le_bytes().into_iter());
- Owned(r)
+ t => {
+ not_supported!("slice unsizing from non array type {t:?}")
+ }
+ },
+ TyKind::Dyn(_) => {
+ let vtable = self.vtable_map.id(current_ty.clone());
+ let mut r = Vec::with_capacity(16);
+ let addr = addr.get(self)?;
+ r.extend(addr.iter().copied());
+ r.extend(vtable.to_le_bytes().into_iter());
+ Owned(r)
+ }
+ TyKind::Adt(id, target_subst) => match &current_ty.kind(Interner) {
+ TyKind::Adt(current_id, current_subst) => {
+ if id != current_id {
+ not_supported!("unsizing struct with different type");
}
- _ => not_supported!("dyn unsizing from non pointers"),
- },
- _ => not_supported!("unknown unsized cast"),
+ let id = match id.0 {
+ AdtId::StructId(s) => s,
+ AdtId::UnionId(_) => not_supported!("unsizing unions"),
+ AdtId::EnumId(_) => not_supported!("unsizing enums"),
+ };
+ let Some((last_field, _)) =
+ self.db.struct_data(id).variant_data.fields().iter().rev().next()
+ else {
+ not_supported!("unsizing struct without field");
+ };
+ let target_last_field = self.db.field_types(id.into())[last_field]
+ .clone()
+ .substitute(Interner, target_subst);
+ let current_last_field = self.db.field_types(id.into())[last_field]
+ .clone()
+ .substitute(Interner, current_subst);
+ return self.unsizing_ptr_from_addr(
+ target_last_field,
+ current_last_field,
+ addr,
+ );
+ }
+ _ => not_supported!("unsizing struct with non adt type"),
},
+ _ => not_supported!("unknown unsized cast"),
})
}
fn layout_of_variant(
&mut self,
- x: VariantId,
+ it: VariantId,
subst: Substitution,
- locals: &Locals<'_>,
+ locals: &Locals,
) -> Result<(usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
- let adt = x.adt_id();
+ let adt = it.adt_id();
if let DefWithBodyId::VariantId(f) = locals.body.owner {
- if let VariantId::EnumVariantId(x) = x {
+ if let VariantId::EnumVariantId(it) = it {
if AdtId::from(f.parent) == adt {
// Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and
// infinite sized type errors) we use a dummy layout
- let i = self.const_eval_discriminant(x)?;
+ let i = self.const_eval_discriminant(it)?;
return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i))));
}
}
@@ -1330,8 +1578,8 @@ impl Evaluator<'_> {
.db
.target_data_layout(self.crate_id)
.ok_or(MirEvalError::TargetDataLayoutNotAvailable)?;
- let enum_variant_id = match x {
- VariantId::EnumVariantId(x) => x,
+ let enum_variant_id = match it {
+ VariantId::EnumVariantId(it) => it,
_ => not_supported!("multi variant layout for non-enums"),
};
let rustc_enum_variant_idx = RustcEnumVariantIdx(enum_variant_id.local_id);
@@ -1345,8 +1593,8 @@ impl Evaluator<'_> {
} else {
discriminant = (variants
.iter_enumerated()
- .filter(|(x, _)| x != untagged_variant)
- .position(|(x, _)| x == rustc_enum_variant_idx)
+ .filter(|(it, _)| it != untagged_variant)
+ .position(|(it, _)| it == rustc_enum_variant_idx)
.unwrap() as i128)
.wrapping_add(*niche_start as i128);
true
@@ -1379,18 +1627,24 @@ impl Evaluator<'_> {
) -> Result<Vec<u8>> {
let mut result = vec![0; size];
if let Some((offset, size, value)) = tag {
- result[offset..offset + size].copy_from_slice(&value.to_le_bytes()[0..size]);
+ match result.get_mut(offset..offset + size) {
+ Some(it) => it.copy_from_slice(&value.to_le_bytes()[0..size]),
+ None => return Err(MirEvalError::BrokenLayout(variant_layout.clone())),
+ }
}
for (i, op) in values.enumerate() {
let offset = variant_layout.fields.offset(i).bytes_usize();
let op = op.get(&self)?;
- result[offset..offset + op.len()].copy_from_slice(op);
+ match result.get_mut(offset..offset + op.len()) {
+ Some(it) => it.copy_from_slice(op),
+ None => return Err(MirEvalError::BrokenLayout(variant_layout.clone())),
+ }
}
Ok(result)
}
- fn eval_operand(&mut self, x: &Operand, locals: &mut Locals<'_>) -> Result<Interval> {
- Ok(match x {
+ fn eval_operand(&mut self, it: &Operand, locals: &mut Locals) -> Result<Interval> {
+ Ok(match it {
Operand::Copy(p) | Operand::Move(p) => {
locals.drop_flags.remove_place(p);
self.eval_place(p, locals)?
@@ -1399,61 +1653,66 @@ impl Evaluator<'_> {
let addr = self.eval_static(*st, locals)?;
Interval::new(addr, self.ptr_size())
}
- Operand::Constant(konst) => {
- let data = &konst.data(Interner);
- match &data.value {
- chalk_ir::ConstValue::BoundVar(_) => not_supported!("bound var constant"),
- chalk_ir::ConstValue::InferenceVar(_) => {
- not_supported!("inference var constant")
- }
- chalk_ir::ConstValue::Placeholder(_) => not_supported!("placeholder constant"),
- chalk_ir::ConstValue::Concrete(c) => {
- self.allocate_const_in_heap(c, &data.ty, locals, konst)?
- }
- }
- }
+ Operand::Constant(konst) => self.allocate_const_in_heap(locals, konst)?,
})
}
- fn allocate_const_in_heap(
- &mut self,
- c: &chalk_ir::ConcreteConst<Interner>,
- ty: &Ty,
- locals: &Locals<'_>,
- konst: &chalk_ir::Const<Interner>,
- ) -> Result<Interval> {
- Ok(match &c.interned {
- ConstScalar::Bytes(v, memory_map) => {
- let mut v: Cow<'_, [u8]> = Cow::Borrowed(v);
- let patch_map = memory_map.transform_addresses(|b| {
- let addr = self.heap_allocate(b.len(), 1); // FIXME: align is wrong
- self.write_memory(addr, b)?;
- Ok(addr.to_usize())
- })?;
- let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1));
- if size != v.len() {
- // Handle self enum
- if size == 16 && v.len() < 16 {
- v = Cow::Owned(pad16(&v, false).to_vec());
- } else if size < 16 && v.len() == 16 {
- v = Cow::Owned(v[0..size].to_vec());
- } else {
- return Err(MirEvalError::InvalidConst(konst.clone()));
+ fn allocate_const_in_heap(&mut self, locals: &Locals, konst: &Const) -> Result<Interval> {
+ let ty = &konst.data(Interner).ty;
+ let chalk_ir::ConstValue::Concrete(c) = &konst.data(Interner).value else {
+ not_supported!("evaluating non concrete constant");
+ };
+ let result_owner;
+ let (v, memory_map) = match &c.interned {
+ ConstScalar::Bytes(v, mm) => (v, mm),
+ ConstScalar::UnevaluatedConst(const_id, subst) => 'b: {
+ let mut const_id = *const_id;
+ let mut subst = subst.clone();
+ if let hir_def::GeneralConstId::ConstId(c) = const_id {
+ let (c, s) = lookup_impl_const(self.db, self.trait_env.clone(), c, subst);
+ const_id = hir_def::GeneralConstId::ConstId(c);
+ subst = s;
+ }
+ result_owner = self
+ .db
+ .const_eval(const_id.into(), subst, Some(self.trait_env.clone()))
+ .map_err(|e| {
+ let name = const_id.name(self.db.upcast());
+ MirEvalError::ConstEvalError(name, Box::new(e))
+ })?;
+ if let chalk_ir::ConstValue::Concrete(c) = &result_owner.data(Interner).value {
+ if let ConstScalar::Bytes(v, mm) = &c.interned {
+ break 'b (v, mm);
}
}
- let addr = self.heap_allocate(size, align);
- self.write_memory(addr, &v)?;
- self.patch_addresses(&patch_map, &memory_map.vtable, addr, ty, locals)?;
- Interval::new(addr, size)
- }
- ConstScalar::UnevaluatedConst(..) => {
- not_supported!("unevaluated const present in monomorphized mir");
+ not_supported!("unevaluatable constant");
}
ConstScalar::Unknown => not_supported!("evaluating unknown const"),
- })
+ };
+ let mut v: Cow<'_, [u8]> = Cow::Borrowed(v);
+ let patch_map = memory_map.transform_addresses(|b, align| {
+ let addr = self.heap_allocate(b.len(), align)?;
+ self.write_memory(addr, b)?;
+ Ok(addr.to_usize())
+ })?;
+ let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1));
+ if size != v.len() {
+ // Handle self enum
+ if size == 16 && v.len() < 16 {
+ v = Cow::Owned(pad16(&v, false).to_vec());
+ } else if size < 16 && v.len() == 16 {
+ v = Cow::Owned(v[0..size].to_vec());
+ } else {
+ return Err(MirEvalError::InvalidConst(konst.clone()));
+ }
+ }
+ let addr = self.heap_allocate(size, align)?;
+ self.write_memory(addr, &v)?;
+ self.patch_addresses(&patch_map, &memory_map.vtable, addr, ty, locals)?;
+ Ok(Interval::new(addr, size))
}
- fn eval_place(&mut self, p: &Place, locals: &Locals<'_>) -> Result<Interval> {
+ fn eval_place(&mut self, p: &Place, locals: &Locals) -> Result<Interval> {
let addr = self.place_addr(p, locals)?;
Ok(Interval::new(
addr,
@@ -1466,11 +1725,11 @@ impl Evaluator<'_> {
return Ok(&[]);
}
let (mem, pos) = match addr {
- Stack(x) => (&self.stack, x),
- Heap(x) => (&self.heap, x),
- Invalid(x) => {
+ Stack(it) => (&self.stack, it),
+ Heap(it) => (&self.heap, it),
+ Invalid(it) => {
return Err(MirEvalError::UndefinedBehavior(format!(
- "read invalid memory address {x} with size {size}"
+ "read invalid memory address {it} with size {size}"
)));
}
};
@@ -1478,28 +1737,35 @@ impl Evaluator<'_> {
.ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory read".to_string()))
}
- fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> {
- if r.is_empty() {
- return Ok(());
- }
+ fn write_memory_using_ref(&mut self, addr: Address, size: usize) -> Result<&mut [u8]> {
let (mem, pos) = match addr {
- Stack(x) => (&mut self.stack, x),
- Heap(x) => (&mut self.heap, x),
- Invalid(x) => {
+ Stack(it) => (&mut self.stack, it),
+ Heap(it) => (&mut self.heap, it),
+ Invalid(it) => {
return Err(MirEvalError::UndefinedBehavior(format!(
- "write invalid memory address {x} with content {r:?}"
+ "write invalid memory address {it} with size {size}"
)));
}
};
- mem.get_mut(pos..pos + r.len())
- .ok_or_else(|| {
- MirEvalError::UndefinedBehavior("out of bound memory write".to_string())
- })?
- .copy_from_slice(r);
+ Ok(mem.get_mut(pos..pos + size).ok_or_else(|| {
+ MirEvalError::UndefinedBehavior("out of bound memory write".to_string())
+ })?)
+ }
+
+ fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> {
+ if r.is_empty() {
+ return Ok(());
+ }
+ self.write_memory_using_ref(addr, r.len())?.copy_from_slice(r);
Ok(())
}
- fn size_align_of(&self, ty: &Ty, locals: &Locals<'_>) -> Result<Option<(usize, usize)>> {
+ fn size_align_of(&self, ty: &Ty, locals: &Locals) -> Result<Option<(usize, usize)>> {
+ if let Some(layout) = self.layout_cache.borrow().get(ty) {
+ return Ok(layout
+ .is_sized()
+ .then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize)));
+ }
if let DefWithBodyId::VariantId(f) = locals.body.owner {
if let Some((adt, _)) = ty.as_adt() {
if AdtId::from(f.parent) == adt {
@@ -1523,39 +1789,61 @@ impl Evaluator<'_> {
/// A version of `self.size_of` which returns an error if the type is unsized. The `what` argument should
/// be something that completes this: `error: type {ty} was unsized. {what} should be sized`
- fn size_of_sized(&self, ty: &Ty, locals: &Locals<'_>, what: &'static str) -> Result<usize> {
+ fn size_of_sized(&self, ty: &Ty, locals: &Locals, what: &'static str) -> Result<usize> {
match self.size_align_of(ty, locals)? {
- Some(x) => Ok(x.0),
+ Some(it) => Ok(it.0),
None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)),
}
}
- fn heap_allocate(&mut self, size: usize, _align: usize) -> Address {
+ /// A version of `self.size_align_of` which returns an error if the type is unsized. The `what` argument
+ /// should be something that completes this: `error: type {ty} was unsized. {what} should be sized`
+ fn size_align_of_sized(
+ &self,
+ ty: &Ty,
+ locals: &Locals,
+ what: &'static str,
+ ) -> Result<(usize, usize)> {
+ match self.size_align_of(ty, locals)? {
+ Some(it) => Ok(it),
+ None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)),
+ }
+ }
+
+ fn heap_allocate(&mut self, size: usize, align: usize) -> Result<Address> {
+ if !align.is_power_of_two() || align > 10000 {
+ return Err(MirEvalError::UndefinedBehavior(format!("Alignment {align} is invalid")));
+ }
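+ // Bump allocation: pad the heap to the requested alignment, check the memory limit,
+ // then reserve `size` zeroed bytes.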
+ while self.heap.len() % align != 0 {
+ self.heap.push(0);
+ }
+ if size.checked_add(self.heap.len()).map_or(true, |x| x > self.memory_limit) {
+ return Err(MirEvalError::Panic(format!("Memory allocation of {size} bytes failed")));
+ }
let pos = self.heap.len();
self.heap.extend(iter::repeat(0).take(size));
- Address::Heap(pos)
+ Ok(Address::Heap(pos))
}
fn detect_fn_trait(&self, def: FunctionId) -> Option<FnTrait> {
- use LangItem::*;
- let ItemContainerId::TraitId(parent) = self.db.lookup_intern_function(def).container else {
- return None;
- };
- let l = lang_attr(self.db.upcast(), parent)?;
- match l {
- FnOnce => Some(FnTrait::FnOnce),
- FnMut => Some(FnTrait::FnMut),
- Fn => Some(FnTrait::Fn),
- _ => None,
+ let def = Some(def);
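+ // The `Fn`/`FnMut`/`FnOnce` methods are cached on the evaluator, so detection is a
+ // plain id comparison instead of a lang item lookup on every call.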
+ if def == self.cached_fn_trait_func {
+ Some(FnTrait::Fn)
+ } else if def == self.cached_fn_mut_trait_func {
+ Some(FnTrait::FnMut)
+ } else if def == self.cached_fn_once_trait_func {
+ Some(FnTrait::FnOnce)
+ } else {
+ None
}
}
- fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals<'_>) -> Result<MemoryMap> {
+ fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals) -> Result<MemoryMap> {
fn rec(
this: &Evaluator<'_>,
bytes: &[u8],
ty: &Ty,
- locals: &Locals<'_>,
+ locals: &Locals,
mm: &mut MemoryMap,
) -> Result<()> {
match ty.kind(Interner) {
@@ -1602,6 +1890,17 @@ impl Evaluator<'_> {
}
}
}
+ chalk_ir::TyKind::Array(inner, len) => {
+ let len = match try_const_usize(this.db, &len) {
+ Some(it) => it as usize,
+ None => not_supported!("non evaluatable array len in patching addresses"),
+ };
+ let size = this.size_of_sized(inner, locals, "inner of array")?;
+ for i in 0..len {
+ let offset = i * size;
+ rec(this, &bytes[offset..offset + size], inner, locals, mm)?;
+ }
+ }
chalk_ir::TyKind::Tuple(_, subst) => {
let layout = this.layout(ty)?;
for (id, ty) in subst.iter(Interner).enumerate() {
@@ -1628,9 +1927,13 @@ impl Evaluator<'_> {
}
AdtId::EnumId(e) => {
let layout = this.layout(ty)?;
- if let Some((v, l)) =
- detect_variant_from_bytes(&layout, this.db, this.crate_id, bytes, e)
- {
+ if let Some((v, l)) = detect_variant_from_bytes(
+ &layout,
+ this.db,
+ this.trait_env.clone(),
+ bytes,
+ e,
+ ) {
let data = &this.db.enum_data(e).variants[v].variant_data;
let field_types = this
.db
@@ -1661,7 +1964,7 @@ impl Evaluator<'_> {
old_vtable: &VTableMap,
addr: Address,
ty: &Ty,
- locals: &Locals<'_>,
+ locals: &Locals,
) -> Result<()> {
// FIXME: support indirect references
let layout = self.layout(ty)?;
@@ -1672,14 +1975,14 @@ impl Evaluator<'_> {
match size {
Some(_) => {
let current = from_bytes!(usize, self.read_memory(addr, my_size)?);
- if let Some(x) = patch_map.get(&current) {
- self.write_memory(addr, &x.to_le_bytes())?;
+ if let Some(it) = patch_map.get(&current) {
+ self.write_memory(addr, &it.to_le_bytes())?;
}
}
None => {
let current = from_bytes!(usize, self.read_memory(addr, my_size / 2)?);
- if let Some(x) = patch_map.get(&current) {
- self.write_memory(addr, &x.to_le_bytes())?;
+ if let Some(it) = patch_map.get(&current) {
+ self.write_memory(addr, &it.to_le_bytes())?;
}
}
}
@@ -1706,10 +2009,31 @@ impl Evaluator<'_> {
AdtId::UnionId(_) => (),
AdtId::EnumId(_) => (),
},
+ TyKind::Tuple(_, subst) => {
+ for (id, ty) in subst.iter(Interner).enumerate() {
+ let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
+ let offset = layout.fields.offset(id).bytes_usize();
+ self.patch_addresses(patch_map, old_vtable, addr.offset(offset), ty, locals)?;
+ }
+ }
+ TyKind::Array(inner, len) => {
+ let len = match try_const_usize(self.db, &len) {
+ Some(it) => it as usize,
+ None => not_supported!("non evaluatable array len in patching addresses"),
+ };
+ let size = self.size_of_sized(inner, locals, "inner of array")?;
+ for i in 0..len {
+ self.patch_addresses(
+ patch_map,
+ old_vtable,
+ addr.offset(i * size),
+ inner,
+ locals,
+ )?;
+ }
+ }
TyKind::AssociatedType(_, _)
| TyKind::Scalar(_)
- | TyKind::Tuple(_, _)
- | TyKind::Array(_, _)
| TyKind::Slice(_)
| TyKind::Raw(_, _)
| TyKind::OpaqueType(_, _)
@@ -1735,21 +2059,21 @@ impl Evaluator<'_> {
bytes: Interval,
destination: Interval,
args: &[IntervalAndTy],
- locals: &Locals<'_>,
+ locals: &Locals,
+ target_bb: Option<BasicBlockId>,
span: MirSpan,
- ) -> Result<()> {
+ ) -> Result<Option<StackFrame>> {
let id = from_bytes!(usize, bytes.get(self)?);
let next_ty = self.vtable_map.ty(id)?.clone();
- match &next_ty.data(Interner).kind {
+ match &next_ty.kind(Interner) {
TyKind::FnDef(def, generic_args) => {
- self.exec_fn_def(*def, generic_args, destination, args, &locals, span)?;
+ self.exec_fn_def(*def, generic_args, destination, args, &locals, target_bb, span)
}
TyKind::Closure(id, subst) => {
- self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span)?;
+ self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span)
}
- _ => return Err(MirEvalError::TypeError("function pointer to non function")),
+ _ => Err(MirEvalError::TypeError("function pointer to non function")),
}
- Ok(())
}
fn exec_closure(
@@ -1759,9 +2083,9 @@ impl Evaluator<'_> {
generic_args: &Substitution,
destination: Interval,
args: &[IntervalAndTy],
- locals: &Locals<'_>,
+ locals: &Locals,
span: MirSpan,
- ) -> Result<()> {
+ ) -> Result<Option<StackFrame>> {
let mir_body = self
.db
.monomorphized_mir_body_for_closure(
@@ -1769,7 +2093,7 @@ impl Evaluator<'_> {
generic_args.clone(),
self.trait_env.clone(),
)
- .map_err(|x| MirEvalError::MirLowerErrorForClosure(closure, x))?;
+ .map_err(|it| MirEvalError::MirLowerErrorForClosure(closure, it))?;
let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some()
{
closure_data.addr.to_bytes()
@@ -1777,12 +2101,18 @@ impl Evaluator<'_> {
closure_data.get(self)?.to_owned()
};
let arg_bytes = iter::once(Ok(closure_data))
- .chain(args.iter().map(|x| Ok(x.get(&self)?.to_owned())))
+ .chain(args.iter().map(|it| Ok(it.get(&self)?.to_owned())))
.collect::<Result<Vec<_>>>()?;
- let bytes = self.interpret_mir(&mir_body, arg_bytes.into_iter()).map_err(|e| {
- MirEvalError::InFunction(Either::Right(closure), Box::new(e), span, locals.body.owner)
- })?;
- destination.write_from_bytes(self, &bytes)
+ let bytes = self
+ .interpret_mir(mir_body, arg_bytes.into_iter().map(IntervalOrOwned::Owned))
+ .map_err(|e| {
+ MirEvalError::InFunction(
+ Box::new(e),
+ vec![(Either::Right(closure), span, locals.body.owner)],
+ )
+ })?;
+ destination.write_from_bytes(self, &bytes)?;
+ Ok(None)
}
fn exec_fn_def(
@@ -1791,18 +2121,34 @@ impl Evaluator<'_> {
generic_args: &Substitution,
destination: Interval,
args: &[IntervalAndTy],
- locals: &Locals<'_>,
+ locals: &Locals,
+ target_bb: Option<BasicBlockId>,
span: MirSpan,
- ) -> Result<()> {
+ ) -> Result<Option<StackFrame>> {
let def: CallableDefId = from_chalk(self.db, def);
let generic_args = generic_args.clone();
match def {
CallableDefId::FunctionId(def) => {
if let Some(_) = self.detect_fn_trait(def) {
- self.exec_fn_trait(&args, destination, locals, span)?;
- return Ok(());
+ return self.exec_fn_trait(
+ def,
+ args,
+ generic_args,
+ locals,
+ destination,
+ target_bb,
+ span,
+ );
}
- self.exec_fn_with_args(def, args, generic_args, locals, destination, span)?;
+ self.exec_fn_with_args(
+ def,
+ args,
+ generic_args,
+ locals,
+ destination,
+ target_bb,
+ span,
+ )
}
CallableDefId::StructId(id) => {
let (size, variant_layout, tag) =
@@ -1811,9 +2157,10 @@ impl Evaluator<'_> {
size,
&variant_layout,
tag,
- args.iter().map(|x| x.interval.into()),
+ args.iter().map(|it| it.interval.into()),
)?;
destination.write_from_bytes(self, &result)?;
+ Ok(None)
}
CallableDefId::EnumVariantId(id) => {
let (size, variant_layout, tag) =
@@ -1822,12 +2169,46 @@ impl Evaluator<'_> {
size,
&variant_layout,
tag,
- args.iter().map(|x| x.interval.into()),
+ args.iter().map(|it| it.interval.into()),
)?;
destination.write_from_bytes(self, &result)?;
+ Ok(None)
}
}
- Ok(())
+ }
+
+ fn get_mir_or_dyn_index(
+ &self,
+ def: FunctionId,
+ generic_args: Substitution,
+ locals: &Locals,
+ span: MirSpan,
+ ) -> Result<MirOrDynIndex> {
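+ // Memoized per (function, substitution) pair: either the monomorphized MIR body, or
+ // the self-type index when the call has to go through `dyn` dispatch.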
+ let pair = (def, generic_args);
+ if let Some(r) = self.mir_or_dyn_index_cache.borrow().get(&pair) {
+ return Ok(r.clone());
+ }
+ let (def, generic_args) = pair;
+ let r = if let Some(self_ty_idx) =
+ is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone())
+ {
+ MirOrDynIndex::Dyn(self_ty_idx)
+ } else {
+ let (imp, generic_args) =
+ self.db.lookup_impl_method(self.trait_env.clone(), def, generic_args.clone());
+ let mir_body = self
+ .db
+ .monomorphized_mir_body(imp.into(), generic_args, self.trait_env.clone())
+ .map_err(|e| {
+ MirEvalError::InFunction(
+ Box::new(MirEvalError::MirLowerError(imp, e)),
+ vec![(Either::Left(imp), span, locals.body.owner)],
+ )
+ })?;
+ MirOrDynIndex::Mir(mir_body)
+ };
+ self.mir_or_dyn_index_cache.borrow_mut().insert((def, generic_args), r.clone());
+ Ok(r)
}
fn exec_fn_with_args(
@@ -1835,10 +2216,11 @@ impl Evaluator<'_> {
def: FunctionId,
args: &[IntervalAndTy],
generic_args: Substitution,
- locals: &Locals<'_>,
+ locals: &Locals,
destination: Interval,
+ target_bb: Option<BasicBlockId>,
span: MirSpan,
- ) -> Result<()> {
+ ) -> Result<Option<StackFrame>> {
if self.detect_and_exec_special_function(
def,
args,
@@ -1847,85 +2229,96 @@ impl Evaluator<'_> {
destination,
span,
)? {
- return Ok(());
+ return Ok(None);
}
- let arg_bytes =
- args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
- if let Some(self_ty_idx) =
- is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone())
- {
- // In the layout of current possible receiver, which at the moment of writing this code is one of
- // `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of possible recievers,
- // the vtable is exactly in the `[ptr_size..2*ptr_size]` bytes. So we can use it without branching on
- // the type.
- let ty =
- self.vtable_map.ty_of_bytes(&arg_bytes[0][self.ptr_size()..self.ptr_size() * 2])?;
- let mut args_for_target = args.to_vec();
- args_for_target[0] = IntervalAndTy {
- interval: args_for_target[0].interval.slice(0..self.ptr_size()),
- ty: ty.clone(),
- };
- let ty = GenericArgData::Ty(ty.clone()).intern(Interner);
- let generics_for_target =
- Substitution::from_iter(
+ let arg_bytes = args.iter().map(|it| IntervalOrOwned::Borrowed(it.interval));
+ match self.get_mir_or_dyn_index(def, generic_args.clone(), locals, span)? {
+ MirOrDynIndex::Dyn(self_ty_idx) => {
+ // In the layout of the currently possible receivers, which at the time of writing are one of
+ // `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of the possible receivers,
+ // the vtable pointer sits exactly in the `[ptr_size..2*ptr_size]` bytes, so we can read it without
+ // branching on the receiver type.
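+ // For example, a `&dyn Trait` argument is laid out as [data pointer | vtable pointer],
+ // which is why the vtable id is read from bytes `[ptr_size..2 * ptr_size]` below.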
+ let first_arg = arg_bytes.clone().next().unwrap();
+ let first_arg = first_arg.get(self)?;
+ let ty = self
+ .vtable_map
+ .ty_of_bytes(&first_arg[self.ptr_size()..self.ptr_size() * 2])?;
+ let mut args_for_target = args.to_vec();
+ args_for_target[0] = IntervalAndTy {
+ interval: args_for_target[0].interval.slice(0..self.ptr_size()),
+ ty: ty.clone(),
+ };
+ let ty = GenericArgData::Ty(ty.clone()).intern(Interner);
+ let generics_for_target = Substitution::from_iter(
Interner,
- generic_args.iter(Interner).enumerate().map(|(i, x)| {
+ generic_args.iter(Interner).enumerate().map(|(i, it)| {
if i == self_ty_idx {
&ty
} else {
- x
+ it
}
}),
);
- return self.exec_fn_with_args(
- def,
- &args_for_target,
- generics_for_target,
+ return self.exec_fn_with_args(
+ def,
+ &args_for_target,
+ generics_for_target,
+ locals,
+ destination,
+ target_bb,
+ span,
+ );
+ }
+ MirOrDynIndex::Mir(body) => self.exec_looked_up_function(
+ body,
locals,
- destination,
+ def,
+ arg_bytes,
span,
- );
+ destination,
+ target_bb,
+ ),
}
- let (imp, generic_args) =
- lookup_impl_method(self.db, self.trait_env.clone(), def, generic_args);
- self.exec_looked_up_function(generic_args, locals, imp, arg_bytes, span, destination)
}
fn exec_looked_up_function(
&mut self,
- generic_args: Substitution,
- locals: &Locals<'_>,
- imp: FunctionId,
- arg_bytes: Vec<Vec<u8>>,
+ mir_body: Arc<MirBody>,
+ locals: &Locals,
+ def: FunctionId,
+ arg_bytes: impl Iterator<Item = IntervalOrOwned>,
span: MirSpan,
destination: Interval,
- ) -> Result<()> {
- let def = imp.into();
- let mir_body = self
- .db
- .monomorphized_mir_body(def, generic_args, self.trait_env.clone())
- .map_err(|e| {
+ target_bb: Option<BasicBlockId>,
+ ) -> Result<Option<StackFrame>> {
+ Ok(if let Some(target_bb) = target_bb {
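+ // When the caller supplies a target block, build the callee's stack frame and hand it
+ // back so the main interpreter loop runs it, instead of recursing into `interpret_mir`.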
+ let (mut locals, prev_stack_ptr) =
+ self.create_locals_for_body(&mir_body, Some(destination))?;
+ self.fill_locals_for_body(&mir_body, &mut locals, arg_bytes.into_iter())?;
+ let span = (span, locals.body.owner);
+ Some(StackFrame { locals, destination: Some(target_bb), prev_stack_ptr, span })
+ } else {
+ let result = self.interpret_mir(mir_body, arg_bytes).map_err(|e| {
MirEvalError::InFunction(
- Either::Left(imp),
- Box::new(MirEvalError::MirLowerError(imp, e)),
- span,
- locals.body.owner,
+ Box::new(e),
+ vec![(Either::Left(def), span, locals.body.owner)],
)
})?;
- let result = self.interpret_mir(&mir_body, arg_bytes.iter().cloned()).map_err(|e| {
- MirEvalError::InFunction(Either::Left(imp), Box::new(e), span, locals.body.owner)
- })?;
- destination.write_from_bytes(self, &result)?;
- Ok(())
+ destination.write_from_bytes(self, &result)?;
+ None
+ })
}
fn exec_fn_trait(
&mut self,
+ def: FunctionId,
args: &[IntervalAndTy],
+ generic_args: Substitution,
+ locals: &Locals,
destination: Interval,
- locals: &Locals<'_>,
+ target_bb: Option<BasicBlockId>,
span: MirSpan,
- ) -> Result<()> {
+ ) -> Result<Option<StackFrame>> {
let func = args.get(0).ok_or(MirEvalError::TypeError("fn trait with no arg"))?;
let mut func_ty = func.ty.clone();
let mut func_data = func.interval;
@@ -1940,15 +2333,30 @@ impl Evaluator<'_> {
let size = self.size_of_sized(&func_ty, locals, "self type of fn trait")?;
func_data = Interval { addr: Address::from_bytes(func_data.get(self)?)?, size };
}
- match &func_ty.data(Interner).kind {
+ match &func_ty.kind(Interner) {
TyKind::FnDef(def, subst) => {
- self.exec_fn_def(*def, subst, destination, &args[1..], locals, span)?;
+ return self.exec_fn_def(
+ *def,
+ subst,
+ destination,
+ &args[1..],
+ locals,
+ target_bb,
+ span,
+ );
}
TyKind::Function(_) => {
- self.exec_fn_pointer(func_data, destination, &args[1..], locals, span)?;
+ return self.exec_fn_pointer(
+ func_data,
+ destination,
+ &args[1..],
+ locals,
+ target_bb,
+ span,
+ );
}
TyKind::Closure(closure, subst) => {
- self.exec_closure(
+ return self.exec_closure(
*closure,
func_data,
&Substitution::from_iter(Interner, ClosureSubst(subst).parent_subst()),
@@ -1956,14 +2364,45 @@ impl Evaluator<'_> {
&args[1..],
locals,
span,
- )?;
+ );
+ }
+ _ => {
+ // try to execute the manual impl of `FnTrait` for structs (nightly feature used in std)
+ let arg0 = func;
+ let args = &args[1..];
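+ // Pack the remaining arguments into a heap-allocated tuple, since the `FnTrait`
+ // methods take `self` plus a single tuple of arguments.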
+ let arg1 = {
+ let ty = TyKind::Tuple(
+ args.len(),
+ Substitution::from_iter(Interner, args.iter().map(|it| it.ty.clone())),
+ )
+ .intern(Interner);
+ let layout = self.layout(&ty)?;
+ let result = self.make_by_layout(
+ layout.size.bytes_usize(),
+ &layout,
+ None,
+ args.iter().map(|it| IntervalOrOwned::Borrowed(it.interval)),
+ )?;
+ // FIXME: the temporary tuple allocated here is never freed (leaked).
+ let size = layout.size.bytes_usize();
+ let addr = self.heap_allocate(size, layout.align.abi.bytes() as usize)?;
+ self.write_memory(addr, &result)?;
+ IntervalAndTy { interval: Interval { addr, size }, ty }
+ };
+ return self.exec_fn_with_args(
+ def,
+ &[arg0.clone(), arg1],
+ generic_args,
+ locals,
+ destination,
+ target_bb,
+ span,
+ );
}
- x => not_supported!("Call FnTrait methods with type {x:?}"),
}
- Ok(())
}
- fn eval_static(&mut self, st: StaticId, locals: &Locals<'_>) -> Result<Address> {
+ fn eval_static(&mut self, st: StaticId, locals: &Locals) -> Result<Address> {
if let Some(o) = self.static_locations.get(&st) {
return Ok(*o);
};
@@ -1975,21 +2414,16 @@ impl Evaluator<'_> {
Box::new(e),
)
})?;
- let data = &konst.data(Interner);
- if let chalk_ir::ConstValue::Concrete(c) = &data.value {
- self.allocate_const_in_heap(&c, &data.ty, locals, &konst)?
- } else {
- not_supported!("unevaluatable static");
- }
+ self.allocate_const_in_heap(locals, &konst)?
} else {
let ty = &self.db.infer(st.into())[self.db.body(st.into()).body_expr];
let Some((size, align)) = self.size_align_of(&ty, locals)? else {
not_supported!("unsized extern static");
};
- let addr = self.heap_allocate(size, align);
+ let addr = self.heap_allocate(size, align)?;
Interval::new(addr, size)
};
- let addr = self.heap_allocate(self.ptr_size(), self.ptr_size());
+ let addr = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
self.write_memory(addr, &result.addr.to_bytes())?;
self.static_locations.insert(st, addr);
Ok(addr)
@@ -2011,13 +2445,13 @@ impl Evaluator<'_> {
}
}
- fn drop_place(&mut self, place: &Place, locals: &mut Locals<'_>, span: MirSpan) -> Result<()> {
+ fn drop_place(&mut self, place: &Place, locals: &mut Locals, span: MirSpan) -> Result<()> {
let (addr, ty, metadata) = self.place_addr_and_ty_and_metadata(place, locals)?;
if !locals.drop_flags.remove_place(place) {
return Ok(());
}
let metadata = match metadata {
- Some(x) => x.get(self)?.to_vec(),
+ Some(it) => it.get(self)?.to_vec(),
None => vec![],
};
self.run_drop_glue_deep(ty, locals, addr, &metadata, span)
@@ -2026,7 +2460,7 @@ impl Evaluator<'_> {
fn run_drop_glue_deep(
&mut self,
ty: Ty,
- locals: &Locals<'_>,
+ locals: &Locals,
addr: Address,
_metadata: &[u8],
span: MirSpan,
@@ -2039,20 +2473,19 @@ impl Evaluator<'_> {
// we can ignore drop in them.
return Ok(());
};
- let (impl_drop_candidate, subst) = lookup_impl_method(
- self.db,
- self.trait_env.clone(),
- drop_fn,
- Substitution::from1(Interner, ty.clone()),
- );
- if impl_drop_candidate != drop_fn {
+
+ let generic_args = Substitution::from1(Interner, ty.clone());
+ if let Ok(MirOrDynIndex::Mir(body)) =
+ self.get_mir_or_dyn_index(drop_fn, generic_args, locals, span)
+ {
self.exec_looked_up_function(
- subst,
+ body,
locals,
- impl_drop_candidate,
- vec![addr.to_bytes()],
+ drop_fn,
+ [IntervalOrOwned::Owned(addr.to_bytes())].into_iter(),
span,
Interval { addr: Address::Invalid(0), size: 0 },
+ None,
)?;
}
match ty.kind(Interner) {
@@ -2121,10 +2554,77 @@ impl Evaluator<'_> {
}
}
-pub fn pad16(x: &[u8], is_signed: bool) -> [u8; 16] {
- let is_negative = is_signed && x.last().unwrap_or(&0) > &128;
+pub fn render_const_using_debug_impl(
+ db: &dyn HirDatabase,
+ owner: ConstId,
+ c: &Const,
+) -> Result<String> {
+ let mut evaluator = Evaluator::new(db, owner.into(), false, None);
+ let locals = &Locals {
+ ptr: ArenaMap::new(),
+ body: db
+ .mir_body(owner.into())
+ .map_err(|_| MirEvalError::NotSupported("unreachable".to_string()))?,
+ drop_flags: DropFlags::default(),
+ };
+ let data = evaluator.allocate_const_in_heap(locals, c)?;
+ let resolver = owner.resolver(db.upcast());
+ let Some(TypeNs::TraitId(debug_trait)) = resolver.resolve_path_in_type_ns_fully(
+ db.upcast(),
+ &hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments(
+ hir_expand::mod_path::PathKind::Abs,
+ [name![core], name![fmt], name![Debug]].into_iter(),
+ )),
+ ) else {
+ not_supported!("core::fmt::Debug not found");
+ };
+ let Some(debug_fmt_fn) = db.trait_data(debug_trait).method_by_name(&name![fmt]) else {
+ not_supported!("core::fmt::Debug::fmt not found");
+ };
+ // a1 = &[""]
+ let a1 = evaluator.heap_allocate(evaluator.ptr_size() * 2, evaluator.ptr_size())?;
+ // a2 = &[::core::fmt::ArgumentV1::new(&(THE_CONST), ::core::fmt::Debug::fmt)]
+ // FIXME: we should call that function, but since its name is going to change in the next rustc version
+ // while its ABI stays the same, we lay it out in memory manually instead.
+ let a2 = evaluator.heap_allocate(evaluator.ptr_size() * 2, evaluator.ptr_size())?;
+ evaluator.write_memory(a2, &data.addr.to_bytes())?;
+ let debug_fmt_fn_ptr = evaluator.vtable_map.id(TyKind::FnDef(
+ db.intern_callable_def(debug_fmt_fn.into()).into(),
+ Substitution::from1(Interner, c.data(Interner).ty.clone()),
+ )
+ .intern(Interner));
+ evaluator.write_memory(a2.offset(evaluator.ptr_size()), &debug_fmt_fn_ptr.to_le_bytes())?;
+ // a3 = ::core::fmt::Arguments::new_v1(a1, a2)
+ // FIXME: similarly, we should call the function here instead of writing the memory directly.
+ let a3 = evaluator.heap_allocate(evaluator.ptr_size() * 6, evaluator.ptr_size())?;
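+ // Write the pieces slice (ptr, len = 1) into slots 2-3 and the args slice (ptr, len = 1)
+ // into slots 4-5 of the six pointer-sized slots; slots 0 and 1 stay zeroed.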
+ evaluator.write_memory(a3.offset(2 * evaluator.ptr_size()), &a1.to_bytes())?;
+ evaluator.write_memory(a3.offset(3 * evaluator.ptr_size()), &[1])?;
+ evaluator.write_memory(a3.offset(4 * evaluator.ptr_size()), &a2.to_bytes())?;
+ evaluator.write_memory(a3.offset(5 * evaluator.ptr_size()), &[1])?;
+ let Some(ValueNs::FunctionId(format_fn)) = resolver.resolve_path_in_value_ns_fully(
+ db.upcast(),
+ &hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments(
+ hir_expand::mod_path::PathKind::Abs,
+ [name![std], name![fmt], name![format]].into_iter(),
+ )),
+ ) else {
+ not_supported!("std::fmt::format not found");
+ };
+ let message_string = evaluator.interpret_mir(
+ db.mir_body(format_fn.into()).map_err(|e| MirEvalError::MirLowerError(format_fn, e))?,
+ [IntervalOrOwned::Borrowed(Interval { addr: a3, size: evaluator.ptr_size() * 6 })]
+ .into_iter(),
+ )?;
+ let addr =
+ Address::from_bytes(&message_string[evaluator.ptr_size()..2 * evaluator.ptr_size()])?;
+ let size = from_bytes!(usize, message_string[2 * evaluator.ptr_size()..]);
+ Ok(std::string::String::from_utf8_lossy(evaluator.read_memory(addr, size)?).into_owned())
+}
+
+pub fn pad16(it: &[u8], is_signed: bool) -> [u8; 16] {
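+ // The bytes are little-endian, so the last byte carries the sign bit; a value above
+ // 127 means the most significant bit is set and the number is negative.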
+ let is_negative = is_signed && it.last().unwrap_or(&0) > &127;
let fill_with = if is_negative { 255 } else { 0 };
- x.iter()
+ it.iter()
.copied()
.chain(iter::repeat(fill_with))
.take(16)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
index 3b9ef03c3..b2e29fd34 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
@@ -3,20 +3,26 @@
use std::cmp;
+use chalk_ir::TyKind;
+use hir_def::resolver::HasResolver;
+use hir_expand::mod_path::ModPath;
+
use super::*;
+mod simd;
+
macro_rules! from_bytes {
($ty:tt, $value:expr) => {
($ty::from_le_bytes(match ($value).try_into() {
- Ok(x) => x,
+ Ok(it) => it,
Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
}))
};
}
macro_rules! not_supported {
- ($x: expr) => {
- return Err(MirEvalError::NotSupported(format!($x)))
+ ($it: expr) => {
+ return Err(MirEvalError::NotSupported(format!($it)))
};
}
@@ -26,10 +32,13 @@ impl Evaluator<'_> {
def: FunctionId,
args: &[IntervalAndTy],
generic_args: &Substitution,
- locals: &Locals<'_>,
+ locals: &Locals,
destination: Interval,
span: MirSpan,
) -> Result<bool> {
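+ // Functions already classified as "not special" are cached, so repeated calls can
+ // skip the attribute and ABI checks below.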
+ if self.not_special_fn_cache.borrow().contains(&def) {
+ return Ok(false);
+ }
let function_data = self.db.function_data(def);
let is_intrinsic = match &function_data.abi {
Some(abi) => *abi == Interned::new_str("rust-intrinsic"),
@@ -53,6 +62,28 @@ impl Evaluator<'_> {
)?;
return Ok(true);
}
+ let is_platform_intrinsic = match &function_data.abi {
+ Some(abi) => *abi == Interned::new_str("platform-intrinsic"),
+ None => match def.lookup(self.db.upcast()).container {
+ hir_def::ItemContainerId::ExternBlockId(block) => {
+ let id = block.lookup(self.db.upcast()).id;
+ id.item_tree(self.db.upcast())[id.value].abi.as_deref()
+ == Some("platform-intrinsic")
+ }
+ _ => false,
+ },
+ };
+ if is_platform_intrinsic {
+ self.exec_platform_intrinsic(
+ function_data.name.as_text().unwrap_or_default().as_str(),
+ args,
+ generic_args,
+ destination,
+ &locals,
+ span,
+ )?;
+ return Ok(true);
+ }
let is_extern_c = match def.lookup(self.db.upcast()).container {
hir_def::ItemContainerId::ExternBlockId(block) => {
let id = block.lookup(self.db.upcast()).id;
@@ -74,31 +105,110 @@ impl Evaluator<'_> {
let alloc_fn = function_data
.attrs
.iter()
- .filter_map(|x| x.path().as_ident())
- .filter_map(|x| x.as_str())
- .find(|x| {
+ .filter_map(|it| it.path().as_ident())
+ .filter_map(|it| it.as_str())
+ .find(|it| {
[
"rustc_allocator",
"rustc_deallocator",
"rustc_reallocator",
"rustc_allocator_zeroed",
]
- .contains(x)
+ .contains(it)
});
if let Some(alloc_fn) = alloc_fn {
self.exec_alloc_fn(alloc_fn, args, destination)?;
return Ok(true);
}
- if let Some(x) = self.detect_lang_function(def) {
+ if let Some(it) = self.detect_lang_function(def) {
let arg_bytes =
- args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
- let result = self.exec_lang_item(x, generic_args, &arg_bytes, locals, span)?;
+ args.iter().map(|it| Ok(it.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
+ let result = self.exec_lang_item(it, generic_args, &arg_bytes, locals, span)?;
destination.write_from_bytes(self, &result)?;
return Ok(true);
}
+ if let ItemContainerId::TraitId(t) = def.lookup(self.db.upcast()).container {
+ if self.db.lang_attr(t.into()) == Some(LangItem::Clone) {
+ let [self_ty] = generic_args.as_slice(Interner) else {
+ not_supported!("wrong generic arg count for clone");
+ };
+ let Some(self_ty) = self_ty.ty(Interner) else {
+ not_supported!("wrong generic arg kind for clone");
+ };
+ // Clone has special impls for tuples and function pointers
+ if matches!(self_ty.kind(Interner), TyKind::Function(_) | TyKind::Tuple(..)) {
+ self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?;
+ return Ok(true);
+ }
+ // Return early to prevent caching clone as non special fn.
+ return Ok(false);
+ }
+ }
+ self.not_special_fn_cache.borrow_mut().insert(def);
Ok(false)
}
+ /// Clone has special impls for tuples and function pointers
+ fn exec_clone(
+ &mut self,
+ def: FunctionId,
+ args: &[IntervalAndTy],
+ self_ty: Ty,
+ locals: &Locals,
+ destination: Interval,
+ span: MirSpan,
+ ) -> Result<()> {
+ match self_ty.kind(Interner) {
+ TyKind::Function(_) => {
+ let [arg] = args else {
+ not_supported!("wrong arg count for clone");
+ };
+ let addr = Address::from_bytes(arg.get(self)?)?;
+ return destination
+ .write_from_interval(self, Interval { addr, size: destination.size });
+ }
+ TyKind::Tuple(_, subst) => {
+ let [arg] = args else {
+ not_supported!("wrong arg count for clone");
+ };
+ let addr = Address::from_bytes(arg.get(self)?)?;
+ let layout = self.layout(&self_ty)?;
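+ // Clone each field separately: build a temporary `&field` argument pointing at the
+ // field's offset and recurse, writing into the matching slice of the destination.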
+ for (i, ty) in subst.iter(Interner).enumerate() {
+ let ty = ty.assert_ty_ref(Interner);
+ let size = self.layout(ty)?.size.bytes_usize();
+ let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
+ let arg = IntervalAndTy {
+ interval: Interval { addr: tmp, size: self.ptr_size() },
+ ty: TyKind::Ref(Mutability::Not, static_lifetime(), ty.clone())
+ .intern(Interner),
+ };
+ let offset = layout.fields.offset(i).bytes_usize();
+ self.write_memory(tmp, &addr.offset(offset).to_bytes())?;
+ self.exec_clone(
+ def,
+ &[arg],
+ ty.clone(),
+ locals,
+ destination.slice(offset..offset + size),
+ span,
+ )?;
+ }
+ }
+ _ => {
+ self.exec_fn_with_args(
+ def,
+ args,
+ Substitution::from1(Interner, self_ty),
+ locals,
+ destination,
+ None,
+ span,
+ )?;
+ }
+ }
+ Ok(())
+ }
+
fn exec_alloc_fn(
&mut self,
alloc_fn: &str,
@@ -112,7 +222,7 @@ impl Evaluator<'_> {
};
let size = from_bytes!(usize, size.get(self)?);
let align = from_bytes!(usize, align.get(self)?);
- let result = self.heap_allocate(size, align);
+ let result = self.heap_allocate(size, align)?;
destination.write_from_bytes(self, &result.to_bytes())?;
}
"rustc_deallocator" => { /* no-op for now */ }
@@ -120,14 +230,18 @@ impl Evaluator<'_> {
let [ptr, old_size, align, new_size] = args else {
return Err(MirEvalError::TypeError("rustc_allocator args are not provided"));
};
- let ptr = Address::from_bytes(ptr.get(self)?)?;
let old_size = from_bytes!(usize, old_size.get(self)?);
let new_size = from_bytes!(usize, new_size.get(self)?);
- let align = from_bytes!(usize, align.get(self)?);
- let result = self.heap_allocate(new_size, align);
- Interval { addr: result, size: old_size }
- .write_from_interval(self, Interval { addr: ptr, size: old_size })?;
- destination.write_from_bytes(self, &result.to_bytes())?;
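+ // A shrinking (or equal-size) reallocation can return the original pointer; only
+ // growing needs a fresh allocation and a copy of the old bytes.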
+ if old_size >= new_size {
+ destination.write_from_interval(self, ptr.interval)?;
+ } else {
+ let ptr = Address::from_bytes(ptr.get(self)?)?;
+ let align = from_bytes!(usize, align.get(self)?);
+ let result = self.heap_allocate(new_size, align)?;
+ Interval { addr: result, size: old_size }
+ .write_from_interval(self, Interval { addr: ptr, size: old_size })?;
+ destination.write_from_bytes(self, &result.to_bytes())?;
+ }
}
_ => not_supported!("unknown alloc function"),
}
@@ -136,7 +250,7 @@ impl Evaluator<'_> {
fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> {
use LangItem::*;
- let candidate = lang_attr(self.db.upcast(), def)?;
+ let candidate = self.db.lang_attr(def.into())?;
// We want to execute these functions with special logic
if [PanicFmt, BeginPanic, SliceLen, DropInPlace].contains(&candidate) {
return Some(candidate);
@@ -146,56 +260,35 @@ impl Evaluator<'_> {
fn exec_lang_item(
&mut self,
- x: LangItem,
+ it: LangItem,
generic_args: &Substitution,
args: &[Vec<u8>],
- locals: &Locals<'_>,
+ locals: &Locals,
span: MirSpan,
) -> Result<Vec<u8>> {
use LangItem::*;
let mut args = args.iter();
- match x {
+ match it {
BeginPanic => Err(MirEvalError::Panic("<unknown-panic-payload>".to_string())),
PanicFmt => {
let message = (|| {
- let arguments_struct =
- self.db.lang_item(self.crate_id, LangItem::FormatArguments)?.as_struct()?;
- let arguments_layout = self
- .layout_adt(arguments_struct.into(), Substitution::empty(Interner))
- .ok()?;
- let arguments_field_pieces =
- self.db.struct_data(arguments_struct).variant_data.field(&name![pieces])?;
- let pieces_offset = arguments_layout
- .fields
- .offset(u32::from(arguments_field_pieces.into_raw()) as usize)
- .bytes_usize();
- let ptr_size = self.ptr_size();
- let arg = args.next()?;
- let pieces_array_addr =
- Address::from_bytes(&arg[pieces_offset..pieces_offset + ptr_size]).ok()?;
- let pieces_array_len = usize::from_le_bytes(
- (&arg[pieces_offset + ptr_size..pieces_offset + 2 * ptr_size])
- .try_into()
- .ok()?,
- );
- let mut message = "".to_string();
- for i in 0..pieces_array_len {
- let piece_ptr_addr = pieces_array_addr.offset(2 * i * ptr_size);
- let piece_addr =
- Address::from_bytes(self.read_memory(piece_ptr_addr, ptr_size).ok()?)
- .ok()?;
- let piece_len = usize::from_le_bytes(
- self.read_memory(piece_ptr_addr.offset(ptr_size), ptr_size)
- .ok()?
- .try_into()
- .ok()?,
- );
- let piece_data = self.read_memory(piece_addr, piece_len).ok()?;
- message += &std::string::String::from_utf8_lossy(piece_data);
- }
- Some(message)
+ let resolver = self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db.upcast());
+ let Some(format_fn) = resolver.resolve_path_in_value_ns_fully(
+ self.db.upcast(),
+ &hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments(
+ hir_expand::mod_path::PathKind::Abs,
+ [name![std], name![fmt], name![format]].into_iter(),
+ )),
+ ) else {
+ not_supported!("std::fmt::format not found");
+ };
+ let hir_def::resolver::ValueNs::FunctionId(format_fn) = format_fn else {
+ not_supported!("std::fmt::format is not a function")
+ };
+ let message_string = self.interpret_mir(
+ self.db.mir_body(format_fn.into())
+ .map_err(|e| MirEvalError::MirLowerError(format_fn, e))?,
+ args.map(|x| IntervalOrOwned::Owned(x.clone())),
+ )?;
+ let addr = Address::from_bytes(&message_string[self.ptr_size()..2 * self.ptr_size()])?;
+ let size = from_bytes!(usize, message_string[2 * self.ptr_size()..]);
+ Ok(std::string::String::from_utf8_lossy(self.read_memory(addr, size)?).into_owned())
})()
- .unwrap_or_else(|| "<format-args-evaluation-failed>".to_string());
+ .unwrap_or_else(|e| format!("Failed to render panic format args: {e:?}"));
Err(MirEvalError::Panic(message))
}
SliceLen => {
@@ -207,7 +300,7 @@ impl Evaluator<'_> {
}
DropInPlace => {
let ty =
- generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)).ok_or(
+ generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)).ok_or(
MirEvalError::TypeError(
"generic argument of drop_in_place is not provided",
),
@@ -224,7 +317,35 @@ impl Evaluator<'_> {
)?;
Ok(vec![])
}
- x => not_supported!("Executing lang item {x:?}"),
+ it => not_supported!("Executing lang item {it:?}"),
+ }
+ }
+
+ fn exec_syscall(
+ &mut self,
+ id: i64,
+ args: &[IntervalAndTy],
+ destination: Interval,
+ _locals: &Locals,
+ _span: MirSpan,
+ ) -> Result<()> {
+ match id {
+ 318 => {
+ // SYS_getrandom
+ let [buf, len, _flags] = args else {
+ return Err(MirEvalError::TypeError("SYS_getrandom args are not provided"));
+ };
+ let addr = Address::from_bytes(buf.get(self)?)?;
+ let size = from_bytes!(usize, len.get(self)?);
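+ // Fill the buffer from the evaluator's internal RNG one byte at a time, then report
+ // `len` bytes written, as a successful getrandom(2) call would.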
+ for i in 0..size {
+ let rand_byte = self.random_state.rand_u64() as u8;
+ self.write_memory(addr.offset(i), &[rand_byte])?;
+ }
+ destination.write_from_interval(self, len.interval)
+ }
+ _ => {
+ not_supported!("Unknown syscall id {id:?}")
+ }
}
}
@@ -234,8 +355,8 @@ impl Evaluator<'_> {
args: &[IntervalAndTy],
_generic_args: &Substitution,
destination: Interval,
- locals: &Locals<'_>,
- _span: MirSpan,
+ locals: &Locals,
+ span: MirSpan,
) -> Result<()> {
match as_str {
"memcmp" => {
@@ -299,7 +420,9 @@ impl Evaluator<'_> {
}
"pthread_getspecific" => {
let Some(arg0) = args.get(0) else {
- return Err(MirEvalError::TypeError("pthread_getspecific arg0 is not provided"));
+ return Err(MirEvalError::TypeError(
+ "pthread_getspecific arg0 is not provided",
+ ));
};
let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
let value = self.thread_local_storage.get_key(key)?;
@@ -308,11 +431,15 @@ impl Evaluator<'_> {
}
"pthread_setspecific" => {
let Some(arg0) = args.get(0) else {
- return Err(MirEvalError::TypeError("pthread_setspecific arg0 is not provided"));
+ return Err(MirEvalError::TypeError(
+ "pthread_setspecific arg0 is not provided",
+ ));
};
let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
let Some(arg1) = args.get(1) else {
- return Err(MirEvalError::TypeError("pthread_setspecific arg1 is not provided"));
+ return Err(MirEvalError::TypeError(
+ "pthread_setspecific arg1 is not provided",
+ ));
};
let value = from_bytes!(u128, pad16(arg1.get(self)?, false));
self.thread_local_storage.set_key(key, value)?;
@@ -326,17 +453,52 @@ impl Evaluator<'_> {
destination.write_from_bytes(self, &0u64.to_le_bytes()[0..destination.size])?;
Ok(())
}
+ "syscall" => {
+ let Some((id, rest)) = args.split_first() else {
+ return Err(MirEvalError::TypeError(
+ "syscall arg1 is not provided",
+ ));
+ };
+ let id = from_bytes!(i64, id.get(self)?);
+ self.exec_syscall(id, rest, destination, locals, span)
+ }
+ "sched_getaffinity" => {
+ let [_pid, _set_size, set] = args else {
+ return Err(MirEvalError::TypeError("libc::write args are not provided"));
+ };
+ let set = Address::from_bytes(set.get(self)?)?;
+ // Only enable core 0 (we are single-threaded anyway), i.e. the bitset 0x0000_0001.
+ self.write_memory(set, &[1])?;
+ // return 0 as success
+ self.write_memory_using_ref(destination.addr, destination.size)?.fill(0);
+ Ok(())
+ }
_ => not_supported!("unknown external function {as_str}"),
}
}
+ fn exec_platform_intrinsic(
+ &mut self,
+ name: &str,
+ args: &[IntervalAndTy],
+ generic_args: &Substitution,
+ destination: Interval,
+ locals: &Locals,
+ span: MirSpan,
+ ) -> Result<()> {
+ if let Some(name) = name.strip_prefix("simd_") {
+ return self.exec_simd_intrinsic(name, args, generic_args, destination, locals, span);
+ }
+ not_supported!("unknown platform intrinsic {name}");
+ }
+
fn exec_intrinsic(
&mut self,
name: &str,
args: &[IntervalAndTy],
generic_args: &Substitution,
destination: Interval,
- locals: &Locals<'_>,
+ locals: &Locals,
span: MirSpan,
) -> Result<()> {
if let Some(name) = name.strip_prefix("atomic_") {
@@ -347,7 +509,9 @@ impl Evaluator<'_> {
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("f64 intrinsic signature doesn't match fn (f64) -> f64"));
+ return Err(MirEvalError::TypeError(
+ "f64 intrinsic signature doesn't match fn (f64) -> f64",
+ ));
};
let arg = from_bytes!(f64, arg.get(self)?);
match name {
@@ -373,7 +537,9 @@ impl Evaluator<'_> {
}
"pow" | "minnum" | "maxnum" | "copysign" => {
let [arg1, arg2] = args else {
- return Err(MirEvalError::TypeError("f64 intrinsic signature doesn't match fn (f64, f64) -> f64"));
+ return Err(MirEvalError::TypeError(
+ "f64 intrinsic signature doesn't match fn (f64, f64) -> f64",
+ ));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
let arg2 = from_bytes!(f64, arg2.get(self)?);
@@ -387,7 +553,9 @@ impl Evaluator<'_> {
}
"powi" => {
let [arg1, arg2] = args else {
- return Err(MirEvalError::TypeError("powif64 signature doesn't match fn (f64, i32) -> f64"));
+ return Err(MirEvalError::TypeError(
+ "powif64 signature doesn't match fn (f64, i32) -> f64",
+ ));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
let arg2 = from_bytes!(i32, arg2.get(self)?);
@@ -395,7 +563,9 @@ impl Evaluator<'_> {
}
"fma" => {
let [arg1, arg2, arg3] = args else {
- return Err(MirEvalError::TypeError("fmaf64 signature doesn't match fn (f64, f64, f64) -> f64"));
+ return Err(MirEvalError::TypeError(
+ "fmaf64 signature doesn't match fn (f64, f64, f64) -> f64",
+ ));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
let arg2 = from_bytes!(f64, arg2.get(self)?);
@@ -411,7 +581,9 @@ impl Evaluator<'_> {
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("f32 intrinsic signature doesn't match fn (f32) -> f32"));
+ return Err(MirEvalError::TypeError(
+ "f32 intrinsic signature doesn't match fn (f32) -> f32",
+ ));
};
let arg = from_bytes!(f32, arg.get(self)?);
match name {
@@ -437,7 +609,9 @@ impl Evaluator<'_> {
}
"pow" | "minnum" | "maxnum" | "copysign" => {
let [arg1, arg2] = args else {
- return Err(MirEvalError::TypeError("f32 intrinsic signature doesn't match fn (f32, f32) -> f32"));
+ return Err(MirEvalError::TypeError(
+ "f32 intrinsic signature doesn't match fn (f32, f32) -> f32",
+ ));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
let arg2 = from_bytes!(f32, arg2.get(self)?);
@@ -451,7 +625,9 @@ impl Evaluator<'_> {
}
"powi" => {
let [arg1, arg2] = args else {
- return Err(MirEvalError::TypeError("powif32 signature doesn't match fn (f32, i32) -> f32"));
+ return Err(MirEvalError::TypeError(
+ "powif32 signature doesn't match fn (f32, i32) -> f32",
+ ));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
let arg2 = from_bytes!(i32, arg2.get(self)?);
@@ -459,7 +635,9 @@ impl Evaluator<'_> {
}
"fma" => {
let [arg1, arg2, arg3] = args else {
- return Err(MirEvalError::TypeError("fmaf32 signature doesn't match fn (f32, f32, f32) -> f32"));
+ return Err(MirEvalError::TypeError(
+ "fmaf32 signature doesn't match fn (f32, f32, f32) -> f32",
+ ));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
let arg2 = from_bytes!(f32, arg2.get(self)?);
@@ -472,21 +650,77 @@ impl Evaluator<'_> {
}
match name {
"size_of" => {
- let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
};
let size = self.size_of_sized(ty, locals, "size_of arg")?;
destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
}
"min_align_of" | "pref_align_of" => {
- let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
return Err(MirEvalError::TypeError("align_of generic arg is not provided"));
};
let align = self.layout(ty)?.align.abi.bytes();
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
}
+ "size_of_val" => {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
+ return Err(MirEvalError::TypeError("size_of_val generic arg is not provided"));
+ };
+ let [arg] = args else {
+ return Err(MirEvalError::TypeError("size_of_val args are not provided"));
+ };
+ if let Some((size, _)) = self.size_align_of(ty, locals)? {
+ destination.write_from_bytes(self, &size.to_le_bytes())
+ } else {
+ let metadata = arg.interval.slice(self.ptr_size()..self.ptr_size() * 2);
+ let (size, _) = self.size_align_of_unsized(ty, metadata, locals)?;
+ destination.write_from_bytes(self, &size.to_le_bytes())
+ }
+ }
+ "min_align_of_val" => {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
+ return Err(MirEvalError::TypeError("min_align_of_val generic arg is not provided"));
+ };
+ let [arg] = args else {
+ return Err(MirEvalError::TypeError("min_align_of_val args are not provided"));
+ };
+ if let Some((_, align)) = self.size_align_of(ty, locals)? {
+ destination.write_from_bytes(self, &align.to_le_bytes())
+ } else {
+ let metadata = arg.interval.slice(self.ptr_size()..self.ptr_size() * 2);
+ let (_, align) = self.size_align_of_unsized(ty, metadata, locals)?;
+ destination.write_from_bytes(self, &align.to_le_bytes())
+ }
+ }
+ "type_name" => {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
+ return Err(MirEvalError::TypeError("type_name generic arg is not provided"));
+ };
+ let ty_name = match ty.display_source_code(
+ self.db,
+ locals.body.owner.module(self.db.upcast()),
+ true,
+ ) {
+ Ok(ty_name) => ty_name,
+ // Fall back to the human-readable display in case of `Err`. Ideally we want to use `display_source_code` to
+ // render full paths.
+ Err(_) => ty.display(self.db).to_string(),
+ };
+ let len = ty_name.len();
+ let addr = self.heap_allocate(len, 1)?;
+ self.write_memory(addr, ty_name.as_bytes())?;
+ destination.slice(0..self.ptr_size()).write_from_bytes(self, &addr.to_bytes())?;
+ destination
+ .slice(self.ptr_size()..2 * self.ptr_size())
+ .write_from_bytes(self, &len.to_le_bytes())
+ }
"needs_drop" => {
- let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
};
let result = !ty.clone().is_copy(self.db, locals.body.owner);
@@ -501,13 +735,17 @@ impl Evaluator<'_> {
let ans = lhs.get(self)? == rhs.get(self)?;
destination.write_from_bytes(self, &[u8::from(ans)])
}
- "saturating_add" => {
+ "saturating_add" | "saturating_sub" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("saturating_add args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
- let ans = lhs.saturating_add(rhs);
+ let ans = match name {
+ "saturating_add" => lhs.saturating_add(rhs),
+ "saturating_sub" => lhs.saturating_sub(rhs),
+ _ => unreachable!(),
+ };
let bits = destination.size * 8;
// FIXME: signed
let is_signed = false;
@@ -526,7 +764,22 @@ impl Evaluator<'_> {
let ans = lhs.wrapping_add(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
- "wrapping_sub" | "unchecked_sub" | "ptr_offset_from_unsigned" | "ptr_offset_from" => {
+ "ptr_offset_from_unsigned" | "ptr_offset_from" => {
+ let [lhs, rhs] = args else {
+ return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
+ };
+ let lhs = i128::from_le_bytes(pad16(lhs.get(self)?, false));
+ let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false));
+ let ans = lhs.wrapping_sub(rhs);
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
+ return Err(MirEvalError::TypeError("ptr_offset_from generic arg is not provided"));
+ };
+ let size = self.size_of_sized(ty, locals, "ptr_offset_from arg")? as i128;
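+ // The intrinsic returns the distance in elements of `ty`, so convert the byte
+ // difference into an element count.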
+ let ans = ans / size;
+ destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
+ }
+ "wrapping_sub" | "unchecked_sub" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
};
@@ -544,6 +797,26 @@ impl Evaluator<'_> {
let ans = lhs.wrapping_mul(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
+ "wrapping_shl" | "unchecked_shl" => {
+ // FIXME: signed
+ let [lhs, rhs] = args else {
+ return Err(MirEvalError::TypeError("unchecked_shl args are not provided"));
+ };
+ let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
+ let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
+ let ans = lhs.wrapping_shl(rhs as u32);
+ destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
+ }
+ "wrapping_shr" | "unchecked_shr" => {
+ // FIXME: signed
+ let [lhs, rhs] = args else {
+ return Err(MirEvalError::TypeError("unchecked_shr args are not provided"));
+ };
+ let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
+ let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
+ let ans = lhs.wrapping_shr(rhs as u32);
+ destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
+ }
"unchecked_rem" => {
// FIXME: signed
let [lhs, rhs] = args else {
@@ -588,7 +861,7 @@ impl Evaluator<'_> {
_ => unreachable!(),
};
let is_overflow = u128overflow
- || ans.to_le_bytes()[op_size..].iter().any(|&x| x != 0 && x != 255);
+ || ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255);
let is_overflow = vec![u8::from(is_overflow)];
let layout = self.layout(&result_ty)?;
let result = self.make_by_layout(
@@ -603,10 +876,15 @@ impl Evaluator<'_> {
}
"copy" | "copy_nonoverlapping" => {
let [src, dst, offset] = args else {
- return Err(MirEvalError::TypeError("copy_nonoverlapping args are not provided"));
+ return Err(MirEvalError::TypeError(
+ "copy_nonoverlapping args are not provided",
+ ));
};
- let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
- return Err(MirEvalError::TypeError("copy_nonoverlapping generic arg is not provided"));
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
+ return Err(MirEvalError::TypeError(
+ "copy_nonoverlapping generic arg is not provided",
+ ));
};
let src = Address::from_bytes(src.get(self)?)?;
let dst = Address::from_bytes(dst.get(self)?)?;
@@ -621,7 +899,8 @@ impl Evaluator<'_> {
let [ptr, offset] = args else {
return Err(MirEvalError::TypeError("offset args are not provided"));
};
- let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
return Err(MirEvalError::TypeError("offset generic arg is not provided"));
};
let ptr = u128::from_le_bytes(pad16(ptr.get(self)?, false));
@@ -652,20 +931,106 @@ impl Evaluator<'_> {
}
"ctpop" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("likely arg is not provided"));
+ return Err(MirEvalError::TypeError("ctpop arg is not provided"));
};
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).count_ones();
destination
.write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
}
+ "ctlz" | "ctlz_nonzero" => {
+ let [arg] = args else {
+ return Err(MirEvalError::TypeError("cttz arg is not provided"));
+ };
+ let result =
+ u128::from_le_bytes(pad16(arg.get(self)?, false)).leading_zeros() as usize;
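+ // The count was taken on a zero-padded u128, so drop the extra leading zeros
+ // contributed by the padding to get the count for the argument's actual width.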
+ let result = result - (128 - arg.interval.size * 8);
+ destination
+ .write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
+ }
"cttz" | "cttz_nonzero" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("likely arg is not provided"));
+ return Err(MirEvalError::TypeError("cttz arg is not provided"));
};
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).trailing_zeros();
destination
.write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
}
+ "rotate_left" => {
+ let [lhs, rhs] = args else {
+ return Err(MirEvalError::TypeError("rotate_left args are not provided"));
+ };
+ let lhs = &lhs.get(self)?[0..destination.size];
+ let rhs = rhs.get(self)?[0] as u32;
+ match destination.size {
+ 1 => {
+ let r = from_bytes!(u8, lhs).rotate_left(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 2 => {
+ let r = from_bytes!(u16, lhs).rotate_left(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 4 => {
+ let r = from_bytes!(u32, lhs).rotate_left(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 8 => {
+ let r = from_bytes!(u64, lhs).rotate_left(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 16 => {
+ let r = from_bytes!(u128, lhs).rotate_left(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ s => not_supported!("destination with size {s} for rotate_left"),
+ }
+ }
+ "rotate_right" => {
+ let [lhs, rhs] = args else {
+ return Err(MirEvalError::TypeError("rotate_right args are not provided"));
+ };
+ let lhs = &lhs.get(self)?[0..destination.size];
+ let rhs = rhs.get(self)?[0] as u32;
+ match destination.size {
+ 1 => {
+ let r = from_bytes!(u8, lhs).rotate_right(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 2 => {
+ let r = from_bytes!(u16, lhs).rotate_right(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 4 => {
+ let r = from_bytes!(u32, lhs).rotate_right(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 8 => {
+ let r = from_bytes!(u64, lhs).rotate_right(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 16 => {
+ let r = from_bytes!(u128, lhs).rotate_right(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ s => not_supported!("destination with size {s} for rotate_right"),
+ }
+ }
+ "discriminant_value" => {
+ let [arg] = args else {
+ return Err(MirEvalError::TypeError("discriminant_value arg is not provided"));
+ };
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
+ return Err(MirEvalError::TypeError(
+ "discriminant_value generic arg is not provided",
+ ));
+ };
+ let addr = Address::from_bytes(arg.get(self)?)?;
+ let size = self.size_of_sized(ty, locals, "discriminant_value ptr type")?;
+ let interval = Interval { addr, size };
+ let r = self.compute_discriminant(ty.clone(), interval.get(self)?)?;
+ destination.write_from_bytes(self, &r.to_le_bytes()[0..destination.size])
+ }
"const_eval_select" => {
let [tuple, const_fn, _] = args else {
return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
@@ -681,24 +1046,126 @@ impl Evaluator<'_> {
let addr = tuple.interval.addr.offset(offset);
args.push(IntervalAndTy::new(addr, field, self, locals)?);
}
- self.exec_fn_trait(&args, destination, locals, span)
+ if let Some(target) = self.db.lang_item(self.crate_id, LangItem::FnOnce) {
+ if let Some(def) = target
+ .as_trait()
+ .and_then(|it| self.db.trait_data(it).method_by_name(&name![call_once]))
+ {
+ self.exec_fn_trait(
+ def,
+ &args,
+ // FIXME: wrong for manual impls of `FnOnce`
+ Substitution::empty(Interner),
+ locals,
+ destination,
+ None,
+ span,
+ )?;
+ return Ok(());
+ }
+ }
+ not_supported!("FnOnce was not available for executing const_eval_select");
+ }
+ "read_via_copy" | "volatile_load" => {
+ let [arg] = args else {
+ return Err(MirEvalError::TypeError("read_via_copy args are not provided"));
+ };
+ let addr = Address::from_bytes(arg.interval.get(self)?)?;
+ destination.write_from_interval(self, Interval { addr, size: destination.size })
+ }
+ "write_bytes" => {
+ let [dst, val, count] = args else {
+ return Err(MirEvalError::TypeError("write_bytes args are not provided"));
+ };
+ let count = from_bytes!(usize, count.get(self)?);
+ let val = from_bytes!(u8, val.get(self)?);
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
+ return Err(MirEvalError::TypeError(
+ "write_bytes generic arg is not provided",
+ ));
+ };
+ let dst = Address::from_bytes(dst.get(self)?)?;
+ let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?;
+ let size = count * size;
+ self.write_memory_using_ref(dst, size)?.fill(val);
+ Ok(())
}
_ => not_supported!("unknown intrinsic {name}"),
}
}
+ fn size_align_of_unsized(
+ &mut self,
+ ty: &Ty,
+ metadata: Interval,
+ locals: &Locals,
+ ) -> Result<(usize, usize)> {
+ Ok(match ty.kind(Interner) {
+ TyKind::Str => (from_bytes!(usize, metadata.get(self)?), 1),
+ TyKind::Slice(inner) => {
+ let len = from_bytes!(usize, metadata.get(self)?);
+ let (size, align) = self.size_align_of_sized(inner, locals, "slice inner type")?;
+ (size * len, align)
+ }
+ TyKind::Dyn(_) => self.size_align_of_sized(
+ self.vtable_map.ty_of_bytes(metadata.get(self)?)?,
+ locals,
+ "dyn concrete type",
+ )?,
+ TyKind::Adt(id, subst) => {
+ let id = id.0;
+ let layout = self.layout_adt(id, subst.clone())?;
+ let id = match id {
+ AdtId::StructId(s) => s,
+ _ => not_supported!("unsized enum or union"),
+ };
+ let field_types = &self.db.field_types(id.into());
+ let last_field_ty =
+ field_types.iter().rev().next().unwrap().1.clone().substitute(Interner, subst);
+ let sized_part_size =
+ layout.fields.offset(field_types.iter().count() - 1).bytes_usize();
+ let sized_part_align = layout.align.abi.bytes() as usize;
+ let (unsized_part_size, unsized_part_align) =
+ self.size_align_of_unsized(&last_field_ty, metadata, locals)?;
+ let align = sized_part_align.max(unsized_part_align) as isize;
+ let size = (sized_part_size + unsized_part_size) as isize;
+ // Must add any necessary padding to `size`
+ // (to make it a multiple of `align`) before returning it.
+ //
+ // Namely, the returned size should be, in C notation:
+ //
+ // `size + ((size & (align-1)) ? align : 0)`
+ //
+ // emulated via the semi-standard fast bit trick:
+ //
+ // `(size + (align-1)) & -align`
+ let size = (size + (align - 1)) & (-align);
+ (size as usize, align as usize)
+ }
+ _ => not_supported!("unsized type other than str, slice, struct and dyn"),
+ })
+ }
+
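A quick sanity check of the bit trick used above (standalone sketch; `align` is assumed to be a power of two): `(size + (align - 1)) & -align` rounds `size` up to the next multiple of `align`.

fn round_up_to_align(size: isize, align: isize) -> isize {
    // Equivalent to adding the padding needed to make `size` a multiple of `align`.
    (size + (align - 1)) & (-align)
}

fn main() {
    assert_eq!(round_up_to_align(5, 4), 8);   // 3 bytes of padding added
    assert_eq!(round_up_to_align(8, 4), 8);   // already aligned, unchanged
    assert_eq!(round_up_to_align(13, 8), 16); // rounds up to the next multiple of 8
}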
fn exec_atomic_intrinsic(
&mut self,
name: &str,
args: &[IntervalAndTy],
generic_args: &Substitution,
destination: Interval,
- locals: &Locals<'_>,
+ locals: &Locals,
_span: MirSpan,
) -> Result<()> {
// We are a single threaded runtime with no UB checking and no optimization, so
- // we can implement these as normal functions.
- let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+ // we can implement atomic intrinsics as normal functions.
+
+ if name.starts_with("singlethreadfence_") || name.starts_with("fence_") {
+ return Ok(());
+ }
+
+ // The rest of the atomic intrinsics have exactly one generic arg
+
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided"));
};
let Some(arg0) = args.get(0) else {
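Background for the early return above (illustrative only): in a single-threaded interpreter with no optimizer and no UB checking, fences are no-ops and atomic accesses behave like plain loads and stores, as the snippet below shows at the language level.

use std::sync::atomic::{AtomicU32, Ordering};

fn main() {
    let a = AtomicU32::new(1);
    a.store(5, Ordering::SeqCst);               // modelled as an ordinary write
    assert_eq!(a.load(Ordering::Relaxed), 5);   // modelled as an ordinary read
    std::sync::atomic::fence(Ordering::SeqCst); // modelled as a no-op
}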
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs
new file mode 100644
index 000000000..ec7463104
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs
@@ -0,0 +1,177 @@
+//! Shim implementation for simd intrinsics
+
+use std::cmp::Ordering;
+
+use crate::TyKind;
+
+use super::*;
+
+macro_rules! from_bytes {
+ ($ty:tt, $value:expr) => {
+ ($ty::from_le_bytes(match ($value).try_into() {
+ Ok(it) => it,
+ Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
+ }))
+ };
+}
+
+macro_rules! not_supported {
+ ($it: expr) => {
+ return Err(MirEvalError::NotSupported(format!($it)))
+ };
+}
+
+impl Evaluator<'_> {
+ fn detect_simd_ty(&self, ty: &Ty) -> Result<(usize, Ty)> {
+ match ty.kind(Interner) {
+ TyKind::Adt(id, subst) => {
+ let len = match subst.as_slice(Interner).get(1).and_then(|it| it.constant(Interner))
+ {
+ Some(len) => len,
+ _ => {
+ if let AdtId::StructId(id) = id.0 {
+ let struct_data = self.db.struct_data(id);
+ let fields = struct_data.variant_data.fields();
+ let Some((first_field, _)) = fields.iter().next() else {
+ not_supported!("simd type with no field");
+ };
+ let field_ty = self.db.field_types(id.into())[first_field]
+ .clone()
+ .substitute(Interner, subst);
+ return Ok((fields.len(), field_ty));
+ }
+ return Err(MirEvalError::TypeError("simd type with no len param"));
+ }
+ };
+ match try_const_usize(self.db, len) {
+ Some(len) => {
+ let Some(ty) = subst.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
+ return Err(MirEvalError::TypeError("simd type with no ty param"));
+ };
+ Ok((len as usize, ty.clone()))
+ }
+ None => Err(MirEvalError::TypeError("simd type with unevaluatable len param")),
+ }
+ }
+ _ => Err(MirEvalError::TypeError("simd type which is not a struct")),
+ }
+ }
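The two shapes `detect_simd_ty` accepts, sketched with hypothetical type names (not from the source): a struct whose second generic parameter is a const lane count, or a tuple struct whose field count is the lane count.

#[allow(dead_code)]
struct SimdConstLen<T, const N: usize>([T; N]); // lane count read from the const parameter

#[allow(dead_code)]
struct Simd4(u8, u8, u8, u8); // lane count read from the number of fields

fn main() {}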
+
+ pub(super) fn exec_simd_intrinsic(
+ &mut self,
+ name: &str,
+ args: &[IntervalAndTy],
+ _generic_args: &Substitution,
+ destination: Interval,
+ _locals: &Locals,
+ _span: MirSpan,
+ ) -> Result<()> {
+ match name {
+ "and" | "or" | "xor" => {
+ let [left, right] = args else {
+ return Err(MirEvalError::TypeError("simd bit op args are not provided"));
+ };
+ let result = left
+ .get(self)?
+ .iter()
+ .zip(right.get(self)?)
+ .map(|(&it, &y)| match name {
+ "and" => it & y,
+ "or" => it | y,
+ "xor" => it ^ y,
+ _ => unreachable!(),
+ })
+ .collect::<Vec<_>>();
+ destination.write_from_bytes(self, &result)
+ }
+ "eq" | "ne" | "lt" | "le" | "gt" | "ge" => {
+ let [left, right] = args else {
+ return Err(MirEvalError::TypeError("simd args are not provided"));
+ };
+ let (len, ty) = self.detect_simd_ty(&left.ty)?;
+ let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_)));
+ let size = left.interval.size / len;
+ let dest_size = destination.size / len;
+ let mut destination_bytes = vec![];
+ let vector = left.get(self)?.chunks(size).zip(right.get(self)?.chunks(size));
+ for (l, r) in vector {
+ let mut result = Ordering::Equal;
+ for (l, r) in l.iter().zip(r).rev() {
+ let it = l.cmp(r);
+ if it != Ordering::Equal {
+ result = it;
+ break;
+ }
+ }
+ if is_signed {
+ if let Some((&l, &r)) = l.iter().zip(r).rev().next() {
+ if l != r {
+ result = (l as i8).cmp(&(r as i8));
+ }
+ }
+ }
+ let result = match result {
+ Ordering::Less => ["lt", "le", "ne"].contains(&name),
+ Ordering::Equal => ["ge", "le", "eq"].contains(&name),
+ Ordering::Greater => ["ge", "gt", "ne"].contains(&name),
+ };
+ let result = if result { 255 } else { 0 };
+ destination_bytes.extend(std::iter::repeat(result).take(dest_size));
+ }
+
+ destination.write_from_bytes(self, &destination_bytes)
+ }
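Sketch of the per-lane result the comparison arm builds (simplified to one byte per output lane; the real code repeats the byte to fill the destination lane size):

fn simd_lt(left: &[i32], right: &[i32]) -> Vec<u8> {
    // Each output lane is all-ones when the predicate holds and zero otherwise.
    left.iter().zip(right).map(|(l, r)| if l < r { 0xFF } else { 0x00 }).collect()
}

fn main() {
    assert_eq!(simd_lt(&[1, 5, -3], &[2, 5, -4]), vec![0xFF, 0x00, 0x00]);
}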
+ "bitmask" => {
+ let [op] = args else {
+ return Err(MirEvalError::TypeError("simd_bitmask args are not provided"));
+ };
+ let (op_len, _) = self.detect_simd_ty(&op.ty)?;
+ let op_count = op.interval.size / op_len;
+ let mut result: u64 = 0;
+ for (i, val) in op.get(self)?.chunks(op_count).enumerate() {
+ if !val.iter().all(|&it| it == 0) {
+ result |= 1 << i;
+ }
+ }
+ destination.write_from_bytes(self, &result.to_le_bytes()[0..destination.size])
+ }
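Sketch of the `bitmask` arm (illustrative, not part of the patch): bit i of the integer result is set exactly when lane i of the mask vector is non-zero.

fn bitmask(lanes: &[bool]) -> u64 {
    lanes.iter()
        .enumerate()
        .fold(0u64, |acc, (i, &set)| if set { acc | (1 << i) } else { acc })
}

fn main() {
    // Lanes 0, 2 and 3 are set -> binary 1101 -> 13.
    assert_eq!(bitmask(&[true, false, true, true]), 0b1101);
}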
+ "shuffle" => {
+ let [left, right, index] = args else {
+ return Err(MirEvalError::TypeError("simd_shuffle args are not provided"));
+ };
+ let TyKind::Array(_, index_len) = index.ty.kind(Interner) else {
+ return Err(MirEvalError::TypeError(
+ "simd_shuffle index argument has non-array type",
+ ));
+ };
+ let index_len = match try_const_usize(self.db, index_len) {
+ Some(it) => it as usize,
+ None => {
+ return Err(MirEvalError::TypeError(
+ "simd type with unevaluatable len param",
+ ))
+ }
+ };
+ let (left_len, _) = self.detect_simd_ty(&left.ty)?;
+ let left_size = left.interval.size / left_len;
+ let vector =
+ left.get(self)?.chunks(left_size).chain(right.get(self)?.chunks(left_size));
+ let mut result = vec![];
+ for index in index.get(self)?.chunks(index.interval.size / index_len) {
+ let index = from_bytes!(u32, index) as usize;
+ let val = match vector.clone().nth(index) {
+ Some(it) => it,
+ None => {
+ return Err(MirEvalError::TypeError(
+ "out of bound access in simd shuffle",
+ ))
+ }
+ };
+ result.extend(val);
+ }
+ destination.write_from_bytes(self, &result)
+ }
+ _ => not_supported!("unknown simd intrinsic {name}"),
+ }
+ }
+}
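Sketch of the `shuffle` arm above (illustrative, not part of the patch): each index selects a lane from the concatenation of the two input vectors, left lanes first.

fn shuffle(left: &[u8], right: &[u8], idx: &[u32]) -> Vec<u8> {
    let both: Vec<u8> = left.iter().chain(right).copied().collect();
    idx.iter().map(|&i| both[i as usize]).collect()
}

fn main() {
    // Indices 3, 0 and 2 pick from the concatenation [10, 20, 30, 40].
    assert_eq!(shuffle(&[10, 20], &[30, 40], &[3, 0, 2]), vec![40, 10, 30]);
}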
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
index ca4268b8f..46165cf3d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
@@ -30,7 +30,7 @@ fn eval_main(db: &TestDB, file_id: FileId) -> Result<(String, String), MirEvalEr
db.trait_environment(func_id.into()),
)
.map_err(|e| MirEvalError::MirLowerError(func_id.into(), e))?;
- let (result, stdout, stderr) = interpret_mir(db, &body, false);
+ let (result, stdout, stderr) = interpret_mir(db, body, false, None);
result?;
Ok((stdout, stderr))
}
@@ -183,6 +183,50 @@ fn main() {
}
#[test]
+fn drop_struct_field() {
+ check_pass(
+ r#"
+//- minicore: drop, add, option, cell, builtin_impls
+
+use core::cell::Cell;
+
+fn should_not_reach() {
+ _ // FIXME: replace this function with panic when that works
+}
+
+struct X<'a>(&'a Cell<i32>);
+impl<'a> Drop for X<'a> {
+ fn drop(&mut self) {
+ self.0.set(self.0.get() + 1)
+ }
+}
+
+struct Tuple<'a>(X<'a>, X<'a>, X<'a>);
+
+fn main() {
+ let s = Cell::new(0);
+ {
+ let x0 = X(&s);
+ let xt = Tuple(x0, X(&s), X(&s));
+ let x1 = xt.1;
+ if s.get() != 0 {
+ should_not_reach();
+ }
+ drop(xt.0);
+ if s.get() != 1 {
+ should_not_reach();
+ }
+ }
+ // FIXME: this should be 3
+ if s.get() != 2 {
+ should_not_reach();
+ }
+}
+"#,
+ );
+}
+
+#[test]
fn drop_in_place() {
check_pass(
r#"
@@ -614,6 +658,78 @@ fn main() {
}
#[test]
+fn self_with_capital_s() {
+ check_pass(
+ r#"
+//- minicore: fn, add, copy
+
+struct S1;
+
+impl S1 {
+ fn f() {
+ Self;
+ }
+}
+
+struct S2 {
+ f1: i32,
+}
+
+impl S2 {
+ fn f() {
+ Self { f1: 5 };
+ }
+}
+
+struct S3(i32);
+
+impl S3 {
+ fn f() {
+ Self(2);
+ Self;
+ let this = Self;
+ this(2);
+ }
+}
+
+fn main() {
+ S1::f();
+ S2::f();
+ S3::f();
+}
+ "#,
+ );
+}
+
+#[test]
+fn syscalls() {
+ check_pass(
+ r#"
+//- minicore: option
+
+extern "C" {
+ pub unsafe extern "C" fn syscall(num: i64, ...) -> i64;
+}
+
+const SYS_getrandom: i64 = 318;
+
+fn should_not_reach() {
+ _ // FIXME: replace this function with panic when that works
+}
+
+fn main() {
+ let mut x: i32 = 0;
+ let r = syscall(SYS_getrandom, &mut x, 4usize, 0);
+ if r != 4 {
+ should_not_reach();
+ }
+}
+
+"#,
+ )
+}
+
+#[test]
fn posix_tls() {
check_pass(
r#"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
index 2cb29b4ab..718df8331 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
@@ -146,12 +146,12 @@ impl MirLowerError {
ConstEvalError::MirEvalError(e) => e.pretty_print(f, db, span_formatter)?,
}
}
- MirLowerError::MissingFunctionDefinition(owner, x) => {
+ MirLowerError::MissingFunctionDefinition(owner, it) => {
let body = db.body(*owner);
writeln!(
f,
"Missing function definition for {}",
- body.pretty_print_expr(db.upcast(), *owner, *x)
+ body.pretty_print_expr(db.upcast(), *owner, *it)
)?;
}
MirLowerError::TypeMismatch(e) => {
@@ -202,15 +202,15 @@ impl MirLowerError {
}
macro_rules! not_supported {
- ($x: expr) => {
- return Err(MirLowerError::NotSupported(format!($x)))
+ ($it: expr) => {
+ return Err(MirLowerError::NotSupported(format!($it)))
};
}
macro_rules! implementation_error {
- ($x: expr) => {{
- ::stdx::never!("MIR lower implementation bug: {}", format!($x));
- return Err(MirLowerError::ImplementationError(format!($x)));
+ ($it: expr) => {{
+ ::stdx::never!("MIR lower implementation bug: {}", format!($it));
+ return Err(MirLowerError::ImplementationError(format!($it)));
}};
}
@@ -310,24 +310,30 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.lower_expr_to_place_with_adjust(expr_id, temp.into(), current, rest)
}
Adjust::Deref(_) => {
- let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, adjustments)? else {
- return Ok(None);
- };
+ let Some((p, current)) =
+ self.lower_expr_as_place_with_adjust(current, expr_id, true, adjustments)?
+ else {
+ return Ok(None);
+ };
self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
Ok(Some(current))
}
Adjust::Borrow(AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m)) => {
- let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else {
- return Ok(None);
- };
+ let Some((p, current)) =
+ self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
+ else {
+ return Ok(None);
+ };
let bk = BorrowKind::from_chalk(*m);
self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into());
Ok(Some(current))
}
Adjust::Pointer(cast) => {
- let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else {
- return Ok(None);
- };
+ let Some((p, current)) =
+ self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
+ else {
+ return Ok(None);
+ };
self.push_assignment(
current,
place,
@@ -373,45 +379,49 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
Err(MirLowerError::IncompleteExpr)
- },
+ }
Expr::Path(p) => {
- let pr = if let Some((assoc, subst)) = self
- .infer
- .assoc_resolutions_for_expr(expr_id)
- {
- match assoc {
- hir_def::AssocItemId::ConstId(c) => {
- self.lower_const(c.into(), current, place, subst, expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
- return Ok(Some(current))
- },
- hir_def::AssocItemId::FunctionId(_) => {
- // FnDefs are zero sized, no action is needed.
- return Ok(Some(current))
+ let pr =
+ if let Some((assoc, subst)) = self.infer.assoc_resolutions_for_expr(expr_id) {
+ match assoc {
+ hir_def::AssocItemId::ConstId(c) => {
+ self.lower_const(
+ c.into(),
+ current,
+ place,
+ subst,
+ expr_id.into(),
+ self.expr_ty_without_adjust(expr_id),
+ )?;
+ return Ok(Some(current));
+ }
+ hir_def::AssocItemId::FunctionId(_) => {
+ // FnDefs are zero sized, no action is needed.
+ return Ok(Some(current));
+ }
+ hir_def::AssocItemId::TypeAliasId(_) => {
+ // FIXME: If it is unreachable, use proper error instead of `not_supported`.
+ not_supported!("associated functions and types")
+ }
}
- hir_def::AssocItemId::TypeAliasId(_) => {
- // FIXME: If it is unreachable, use proper error instead of `not_supported`.
- not_supported!("associated functions and types")
- },
- }
- } else if let Some(variant) = self
- .infer
- .variant_resolution_for_expr(expr_id)
- {
- match variant {
- VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
- VariantId::StructId(s) => ValueNs::StructId(s),
- VariantId::UnionId(_) => implementation_error!("Union variant as path"),
- }
- } else {
- let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
- let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
- resolver
- .resolve_path_in_value_ns_fully(self.db.upcast(), p)
- .ok_or_else(unresolved_name)?
- };
+ } else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) {
+ match variant {
+ VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
+ VariantId::StructId(s) => ValueNs::StructId(s),
+ VariantId::UnionId(_) => implementation_error!("Union variant as path"),
+ }
+ } else {
+ let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
+ let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
+ resolver
+ .resolve_path_in_value_ns_fully(self.db.upcast(), p)
+ .ok_or_else(unresolved_name)?
+ };
match pr {
ValueNs::LocalBinding(_) | ValueNs::StaticId(_) => {
- let Some((temp, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, false)? else {
+ let Some((temp, current)) =
+ self.lower_expr_as_place_without_adjust(current, expr_id, false)?
+ else {
return Ok(None);
};
self.push_assignment(
@@ -423,11 +433,19 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(Some(current))
}
ValueNs::ConstId(const_id) => {
- self.lower_const(const_id.into(), current, place, Substitution::empty(Interner), expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
+ self.lower_const(
+ const_id.into(),
+ current,
+ place,
+ Substitution::empty(Interner),
+ expr_id.into(),
+ self.expr_ty_without_adjust(expr_id),
+ )?;
Ok(Some(current))
}
ValueNs::EnumVariantId(variant_id) => {
- let variant_data = &self.db.enum_data(variant_id.parent).variants[variant_id.local_id];
+ let variant_data =
+ &self.db.enum_data(variant_id.parent).variants[variant_id.local_id];
if variant_data.variant_data.kind() == StructKind::Unit {
let ty = self.infer.type_of_expr[expr_id].clone();
current = self.lower_enum_variant(
@@ -468,17 +486,16 @@ impl<'ctx> MirLowerCtx<'ctx> {
);
Ok(Some(current))
}
- ValueNs::FunctionId(_) | ValueNs::StructId(_) => {
+ ValueNs::FunctionId(_) | ValueNs::StructId(_) | ValueNs::ImplSelf(_) => {
// It's probably a unit struct or a zero sized function, so no action is needed.
Ok(Some(current))
}
- x => {
- not_supported!("unknown name {x:?} in value name space");
- }
}
}
Expr::If { condition, then_branch, else_branch } => {
- let Some((discr, current)) = self.lower_expr_to_some_operand(*condition, current)? else {
+ let Some((discr, current)) =
+ self.lower_expr_to_some_operand(*condition, current)?
+ else {
return Ok(None);
};
let start_of_then = self.new_basic_block();
@@ -501,15 +518,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(self.merge_blocks(end_of_then, end_of_else, expr_id.into()))
}
Expr::Let { pat, expr } => {
- let Some((cond_place, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ let Some((cond_place, current)) = self.lower_expr_as_place(current, *expr, true)?
+ else {
return Ok(None);
};
- let (then_target, else_target) = self.pattern_match(
- current,
- None,
- cond_place,
- *pat,
- )?;
+ let (then_target, else_target) =
+ self.pattern_match(current, None, cond_place, *pat)?;
self.write_bytes_to_place(
then_target,
place.clone(),
@@ -533,49 +547,35 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
Expr::Block { id: _, statements, tail, label } => {
if let Some(label) = label {
- self.lower_loop(current, place.clone(), Some(*label), expr_id.into(), |this, begin| {
- if let Some(current) = this.lower_block_to_place(statements, begin, *tail, place, expr_id.into())? {
- let end = this.current_loop_end()?;
- this.set_goto(current, end, expr_id.into());
- }
- Ok(())
- })
+ self.lower_loop(
+ current,
+ place.clone(),
+ Some(*label),
+ expr_id.into(),
+ |this, begin| {
+ if let Some(current) = this.lower_block_to_place(
+ statements,
+ begin,
+ *tail,
+ place,
+ expr_id.into(),
+ )? {
+ let end = this.current_loop_end()?;
+ this.set_goto(current, end, expr_id.into());
+ }
+ Ok(())
+ },
+ )
} else {
self.lower_block_to_place(statements, current, *tail, place, expr_id.into())
}
}
- Expr::Loop { body, label } => self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
- let scope = this.push_drop_scope();
- if let Some((_, mut current)) = this.lower_expr_as_place(begin, *body, true)? {
- current = scope.pop_and_drop(this, current);
- this.set_goto(current, begin, expr_id.into());
- } else {
- scope.pop_assume_dropped(this);
- }
- Ok(())
- }),
- Expr::While { condition, body, label } => {
- self.lower_loop(current, place, *label, expr_id.into(),|this, begin| {
+ Expr::Loop { body, label } => {
+ self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
let scope = this.push_drop_scope();
- let Some((discr, to_switch)) = this.lower_expr_to_some_operand(*condition, begin)? else {
- return Ok(());
- };
- let fail_cond = this.new_basic_block();
- let after_cond = this.new_basic_block();
- this.set_terminator(
- to_switch,
- TerminatorKind::SwitchInt {
- discr,
- targets: SwitchTargets::static_if(1, after_cond, fail_cond),
- },
- expr_id.into(),
- );
- let fail_cond = this.drop_until_scope(this.drop_scopes.len() - 1, fail_cond);
- let end = this.current_loop_end()?;
- this.set_goto(fail_cond, end, expr_id.into());
- if let Some((_, block)) = this.lower_expr_as_place(after_cond, *body, true)? {
- let block = scope.pop_and_drop(this, block);
- this.set_goto(block, begin, expr_id.into());
+ if let Some((_, mut current)) = this.lower_expr_as_place(begin, *body, true)? {
+ current = scope.pop_and_drop(this, current);
+ this.set_goto(current, begin, expr_id.into());
} else {
scope.pop_assume_dropped(this);
}
@@ -583,8 +583,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
})
}
Expr::Call { callee, args, .. } => {
- if let Some((func_id, generic_args)) =
- self.infer.method_resolution(expr_id) {
+ if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) {
let ty = chalk_ir::TyKind::FnDef(
CallableDefId::FunctionId(func_id).to_chalk(self.db),
generic_args,
@@ -601,24 +600,51 @@ impl<'ctx> MirLowerCtx<'ctx> {
);
}
let callee_ty = self.expr_ty_after_adjustments(*callee);
- match &callee_ty.data(Interner).kind {
+ match &callee_ty.kind(Interner) {
chalk_ir::TyKind::FnDef(..) => {
let func = Operand::from_bytes(vec![], callee_ty.clone());
- self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id), expr_id.into())
+ self.lower_call_and_args(
+ func,
+ args.iter().copied(),
+ place,
+ current,
+ self.is_uninhabited(expr_id),
+ expr_id.into(),
+ )
}
chalk_ir::TyKind::Function(_) => {
- let Some((func, current)) = self.lower_expr_to_some_operand(*callee, current)? else {
+ let Some((func, current)) =
+ self.lower_expr_to_some_operand(*callee, current)?
+ else {
return Ok(None);
};
- self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id), expr_id.into())
+ self.lower_call_and_args(
+ func,
+ args.iter().copied(),
+ place,
+ current,
+ self.is_uninhabited(expr_id),
+ expr_id.into(),
+ )
+ }
+ TyKind::Closure(_, _) => {
+ not_supported!(
+ "method resolution not emitted for closure (Are Fn traits available?)"
+ );
+ }
+ TyKind::Error => {
+ return Err(MirLowerError::MissingFunctionDefinition(self.owner, expr_id))
}
- TyKind::Error => return Err(MirLowerError::MissingFunctionDefinition(self.owner, expr_id)),
_ => return Err(MirLowerError::TypeError("function call on bad type")),
}
}
Expr::MethodCall { receiver, args, method_name, .. } => {
let (func_id, generic_args) =
- self.infer.method_resolution(expr_id).ok_or_else(|| MirLowerError::UnresolvedMethod(method_name.display(self.db.upcast()).to_string()))?;
+ self.infer.method_resolution(expr_id).ok_or_else(|| {
+ MirLowerError::UnresolvedMethod(
+ method_name.display(self.db.upcast()).to_string(),
+ )
+ })?;
let func = Operand::from_fn(self.db, func_id, generic_args);
self.lower_call_and_args(
func,
@@ -630,23 +656,27 @@ impl<'ctx> MirLowerCtx<'ctx> {
)
}
Expr::Match { expr, arms } => {
- let Some((cond_place, mut current)) = self.lower_expr_as_place(current, *expr, true)?
+ let Some((cond_place, mut current)) =
+ self.lower_expr_as_place(current, *expr, true)?
else {
return Ok(None);
};
let mut end = None;
for MatchArm { pat, guard, expr } in arms.iter() {
- let (then, mut otherwise) = self.pattern_match(
- current,
- None,
- cond_place.clone(),
- *pat,
- )?;
+ let (then, mut otherwise) =
+ self.pattern_match(current, None, cond_place.clone(), *pat)?;
let then = if let &Some(guard) = guard {
let next = self.new_basic_block();
let o = otherwise.get_or_insert_with(|| self.new_basic_block());
if let Some((discr, c)) = self.lower_expr_to_some_operand(guard, then)? {
- self.set_terminator(c, TerminatorKind::SwitchInt { discr, targets: SwitchTargets::static_if(1, next, *o) }, expr_id.into());
+ self.set_terminator(
+ c,
+ TerminatorKind::SwitchInt {
+ discr,
+ targets: SwitchTargets::static_if(1, next, *o),
+ },
+ expr_id.into(),
+ );
}
next
} else {
@@ -672,33 +702,53 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
Expr::Continue { label } => {
let loop_data = match label {
- Some(l) => self.labeled_loop_blocks.get(l).ok_or(MirLowerError::UnresolvedLabel)?,
- None => self.current_loop_blocks.as_ref().ok_or(MirLowerError::ContinueWithoutLoop)?,
+ Some(l) => {
+ self.labeled_loop_blocks.get(l).ok_or(MirLowerError::UnresolvedLabel)?
+ }
+ None => self
+ .current_loop_blocks
+ .as_ref()
+ .ok_or(MirLowerError::ContinueWithoutLoop)?,
};
let begin = loop_data.begin;
current = self.drop_until_scope(loop_data.drop_scope_index, current);
self.set_goto(current, begin, expr_id.into());
Ok(None)
- },
+ }
&Expr::Break { expr, label } => {
if let Some(expr) = expr {
let loop_data = match label {
- Some(l) => self.labeled_loop_blocks.get(&l).ok_or(MirLowerError::UnresolvedLabel)?,
- None => self.current_loop_blocks.as_ref().ok_or(MirLowerError::BreakWithoutLoop)?,
+ Some(l) => self
+ .labeled_loop_blocks
+ .get(&l)
+ .ok_or(MirLowerError::UnresolvedLabel)?,
+ None => self
+ .current_loop_blocks
+ .as_ref()
+ .ok_or(MirLowerError::BreakWithoutLoop)?,
};
- let Some(c) = self.lower_expr_to_place(expr, loop_data.place.clone(), current)? else {
+ let Some(c) =
+ self.lower_expr_to_place(expr, loop_data.place.clone(), current)?
+ else {
return Ok(None);
};
current = c;
}
let (end, drop_scope) = match label {
Some(l) => {
- let loop_blocks = self.labeled_loop_blocks.get(&l).ok_or(MirLowerError::UnresolvedLabel)?;
- (loop_blocks.end.expect("We always generate end for labeled loops"), loop_blocks.drop_scope_index)
- },
- None => {
- (self.current_loop_end()?, self.current_loop_blocks.as_ref().unwrap().drop_scope_index)
- },
+ let loop_blocks = self
+ .labeled_loop_blocks
+ .get(&l)
+ .ok_or(MirLowerError::UnresolvedLabel)?;
+ (
+ loop_blocks.end.expect("We always generate end for labeled loops"),
+ loop_blocks.drop_scope_index,
+ )
+ }
+ None => (
+ self.current_loop_end()?,
+ self.current_loop_blocks.as_ref().unwrap().drop_scope_index,
+ ),
};
current = self.drop_until_scope(drop_scope, current);
self.set_goto(current, end, expr_id.into());
@@ -706,7 +756,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
Expr::Return { expr } => {
if let Some(expr) = expr {
- if let Some(c) = self.lower_expr_to_place(*expr, return_slot().into(), current)? {
+ if let Some(c) =
+ self.lower_expr_to_place(*expr, return_slot().into(), current)?
+ {
current = c;
} else {
return Ok(None);
@@ -719,19 +771,17 @@ impl<'ctx> MirLowerCtx<'ctx> {
Expr::Yield { .. } => not_supported!("yield"),
Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => {
let spread_place = match spread {
- &Some(x) => {
- let Some((p, c)) = self.lower_expr_as_place(current, x, true)? else {
+ &Some(it) => {
+ let Some((p, c)) = self.lower_expr_as_place(current, it, true)? else {
return Ok(None);
};
current = c;
Some(p)
- },
+ }
None => None,
};
- let variant_id = self
- .infer
- .variant_resolution_for_expr(expr_id)
- .ok_or_else(|| match path {
+ let variant_id =
+ self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path {
Some(p) => MirLowerError::UnresolvedName(p.display(self.db).to_string()),
None => MirLowerError::RecordLiteralWithoutPath,
})?;
@@ -746,7 +796,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
for RecordLitField { name, expr } in fields.iter() {
let field_id =
variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
- let Some((op, c)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ let Some((op, c)) = self.lower_expr_to_some_operand(*expr, current)?
+ else {
return Ok(None);
};
current = c;
@@ -758,18 +809,23 @@ impl<'ctx> MirLowerCtx<'ctx> {
Rvalue::Aggregate(
AggregateKind::Adt(variant_id, subst),
match spread_place {
- Some(sp) => operands.into_iter().enumerate().map(|(i, x)| {
- match x {
- Some(x) => x,
+ Some(sp) => operands
+ .into_iter()
+ .enumerate()
+ .map(|(i, it)| match it {
+ Some(it) => it,
None => {
- let p = sp.project(ProjectionElem::Field(FieldId {
- parent: variant_id,
- local_id: LocalFieldId::from_raw(RawIdx::from(i as u32)),
- }));
+ let p =
+ sp.project(ProjectionElem::Field(FieldId {
+ parent: variant_id,
+ local_id: LocalFieldId::from_raw(
+ RawIdx::from(i as u32),
+ ),
+ }));
Operand::Copy(p)
- },
- }
- }).collect(),
+ }
+ })
+ .collect(),
None => operands.into_iter().collect::<Option<_>>().ok_or(
MirLowerError::TypeError("missing field in record literal"),
)?,
@@ -785,7 +841,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
};
let local_id =
variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
- let place = place.project(PlaceElem::Field(FieldId { parent: union_id.into(), local_id }));
+ let place = place.project(PlaceElem::Field(FieldId {
+ parent: union_id.into(),
+ local_id,
+ }));
self.lower_expr_to_place(*expr, place, current)
}
}
@@ -795,11 +854,18 @@ impl<'ctx> MirLowerCtx<'ctx> {
Expr::Async { .. } => not_supported!("async block"),
&Expr::Const(id) => {
let subst = self.placeholder_subst();
- self.lower_const(id.into(), current, place, subst, expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
+ self.lower_const(
+ id.into(),
+ current,
+ place,
+ subst,
+ expr_id.into(),
+ self.expr_ty_without_adjust(expr_id),
+ )?;
Ok(Some(current))
- },
+ }
Expr::Cast { expr, type_ref: _ } => {
- let Some((x, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ let Some((it, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
return Ok(None);
};
let source_ty = self.infer[*expr].clone();
@@ -807,7 +873,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_assignment(
current,
place,
- Rvalue::Cast(cast_kind(&source_ty, &target_ty)?, x, target_ty),
+ Rvalue::Cast(cast_kind(&source_ty, &target_ty)?, it, target_ty),
expr_id.into(),
);
Ok(Some(current))
@@ -822,23 +888,37 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
Expr::Box { expr } => {
let ty = self.expr_ty_after_adjustments(*expr);
- self.push_assignment(current, place.clone(), Rvalue::ShallowInitBoxWithAlloc(ty), expr_id.into());
- let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ self.push_assignment(
+ current,
+ place.clone(),
+ Rvalue::ShallowInitBoxWithAlloc(ty),
+ expr_id.into(),
+ );
+ let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)?
+ else {
return Ok(None);
};
let p = place.project(ProjectionElem::Deref);
self.push_assignment(current, p, operand.into(), expr_id.into());
Ok(Some(current))
- },
- Expr::Field { .. } | Expr::Index { .. } | Expr::UnaryOp { op: hir_def::hir::UnaryOp::Deref, .. } => {
- let Some((p, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, true)? else {
+ }
+ Expr::Field { .. }
+ | Expr::Index { .. }
+ | Expr::UnaryOp { op: hir_def::hir::UnaryOp::Deref, .. } => {
+ let Some((p, current)) =
+ self.lower_expr_as_place_without_adjust(current, expr_id, true)?
+ else {
return Ok(None);
};
self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
Ok(Some(current))
}
- Expr::UnaryOp { expr, op: op @ (hir_def::hir::UnaryOp::Not | hir_def::hir::UnaryOp::Neg) } => {
- let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ Expr::UnaryOp {
+ expr,
+ op: op @ (hir_def::hir::UnaryOp::Not | hir_def::hir::UnaryOp::Neg),
+ } => {
+ let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)?
+ else {
return Ok(None);
};
let operation = match op {
@@ -853,7 +933,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
expr_id.into(),
);
Ok(Some(current))
- },
+ }
Expr::BinaryOp { lhs, rhs, op } => {
let op = op.ok_or(MirLowerError::IncompleteExpr)?;
let is_builtin = 'b: {
@@ -861,16 +941,19 @@ impl<'ctx> MirLowerCtx<'ctx> {
// for binary operator, and use without adjust to simplify our conditions.
let lhs_ty = self.expr_ty_without_adjust(*lhs);
let rhs_ty = self.expr_ty_without_adjust(*rhs);
- if matches!(op ,BinaryOp::CmpOp(syntax::ast::CmpOp::Eq { .. })) {
+ if matches!(op, BinaryOp::CmpOp(syntax::ast::CmpOp::Eq { .. })) {
if lhs_ty.as_raw_ptr().is_some() && rhs_ty.as_raw_ptr().is_some() {
break 'b true;
}
}
let builtin_inequal_impls = matches!(
op,
- BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) | BinaryOp::Assignment { op: Some(ArithOp::Shl | ArithOp::Shr) }
+ BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr)
+ | BinaryOp::Assignment { op: Some(ArithOp::Shl | ArithOp::Shr) }
);
- lhs_ty.is_scalar() && rhs_ty.is_scalar() && (lhs_ty == rhs_ty || builtin_inequal_impls)
+ lhs_ty.is_scalar()
+ && rhs_ty.is_scalar()
+ && (lhs_ty == rhs_ty || builtin_inequal_impls)
};
if !is_builtin {
if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) {
@@ -892,34 +975,34 @@ impl<'ctx> MirLowerCtx<'ctx> {
.infer
.expr_adjustments
.get(lhs)
- .and_then(|x| x.split_last())
- .map(|x| x.1)
- .ok_or(MirLowerError::TypeError("adjustment of binary op was missing"))?;
+ .and_then(|it| it.split_last())
+ .map(|it| it.1)
+ .ok_or(MirLowerError::TypeError(
+ "adjustment of binary op was missing",
+ ))?;
let Some((lhs_place, current)) =
self.lower_expr_as_place_with_adjust(current, *lhs, false, adjusts)?
else {
return Ok(None);
};
- let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
+ let Some((rhs_op, current)) =
+ self.lower_expr_to_some_operand(*rhs, current)?
+ else {
return Ok(None);
};
- let r_value = Rvalue::CheckedBinaryOp(op.into(), Operand::Copy(lhs_place.clone()), rhs_op);
+ let r_value = Rvalue::CheckedBinaryOp(
+ op.into(),
+ Operand::Copy(lhs_place.clone()),
+ rhs_op,
+ );
self.push_assignment(current, lhs_place, r_value, expr_id.into());
return Ok(Some(current));
} else {
- let Some((lhs_place, current)) =
- self.lower_expr_as_place(current, *lhs, false)?
- else {
- return Ok(None);
- };
- let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
- return Ok(None);
- };
- self.push_assignment(current, lhs_place, rhs_op.into(), expr_id.into());
- return Ok(Some(current));
+ return self.lower_assignment(current, *lhs, *rhs, expr_id.into());
}
}
- let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)? else {
+ let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)?
+ else {
return Ok(None);
};
if let hir_def::hir::BinaryOp::LogicOp(op) = op {
@@ -928,22 +1011,31 @@ impl<'ctx> MirLowerCtx<'ctx> {
syntax::ast::LogicOp::Or => 1,
};
let start_of_then = self.new_basic_block();
- self.push_assignment(start_of_then, place.clone(), lhs_op.clone().into(), expr_id.into());
+ self.push_assignment(
+ start_of_then,
+ place.clone(),
+ lhs_op.clone().into(),
+ expr_id.into(),
+ );
let end_of_then = Some(start_of_then);
let start_of_else = self.new_basic_block();
- let end_of_else =
- self.lower_expr_to_place(*rhs, place, start_of_else)?;
+ let end_of_else = self.lower_expr_to_place(*rhs, place, start_of_else)?;
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr: lhs_op,
- targets: SwitchTargets::static_if(value_to_short, start_of_then, start_of_else),
+ targets: SwitchTargets::static_if(
+ value_to_short,
+ start_of_then,
+ start_of_else,
+ ),
},
expr_id.into(),
);
return Ok(self.merge_blocks(end_of_then, end_of_else, expr_id.into()));
}
- let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
+ let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)?
+ else {
return Ok(None);
};
self.push_assignment(
@@ -976,15 +1068,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
};
let mut lp = None;
let mut rp = None;
- if let Some(x) = lhs {
- let Some((o, c)) = self.lower_expr_to_some_operand(x, current)? else {
+ if let Some(it) = lhs {
+ let Some((o, c)) = self.lower_expr_to_some_operand(it, current)? else {
return Ok(None);
};
lp = Some(o);
current = c;
}
- if let Some(x) = rhs {
- let Some((o, c)) = self.lower_expr_to_some_operand(x, current)? else {
+ if let Some(it) = rhs {
+ let Some((o, c)) = self.lower_expr_to_some_operand(it, current)? else {
return Ok(None);
};
rp = Some(o);
@@ -995,20 +1087,28 @@ impl<'ctx> MirLowerCtx<'ctx> {
place,
Rvalue::Aggregate(
AggregateKind::Adt(st.into(), subst.clone()),
- self.db.struct_data(st).variant_data.fields().iter().map(|x| {
- let o = match x.1.name.as_str() {
- Some("start") => lp.take(),
- Some("end") => rp.take(),
- Some("exhausted") => Some(Operand::from_bytes(vec![0], TyBuilder::bool())),
- _ => None,
- };
- o.ok_or(MirLowerError::UnresolvedField)
- }).collect::<Result<_>>()?,
+ self.db
+ .struct_data(st)
+ .variant_data
+ .fields()
+ .iter()
+ .map(|it| {
+ let o = match it.1.name.as_str() {
+ Some("start") => lp.take(),
+ Some("end") => rp.take(),
+ Some("exhausted") => {
+ Some(Operand::from_bytes(vec![0], TyBuilder::bool()))
+ }
+ _ => None,
+ };
+ o.ok_or(MirLowerError::UnresolvedField)
+ })
+ .collect::<Result<_>>()?,
),
expr_id.into(),
);
Ok(Some(current))
- },
+ }
Expr::Closure { .. } => {
let ty = self.expr_ty_without_adjust(expr_id);
let TyKind::Closure(id, _) = ty.kind(Interner) else {
@@ -1020,22 +1120,33 @@ impl<'ctx> MirLowerCtx<'ctx> {
for capture in captures.iter() {
let p = Place {
local: self.binding_local(capture.place.local)?,
- projection: capture.place.projections.clone().into_iter().map(|x| {
- match x {
+ projection: capture
+ .place
+ .projections
+ .clone()
+ .into_iter()
+ .map(|it| match it {
ProjectionElem::Deref => ProjectionElem::Deref,
- ProjectionElem::Field(x) => ProjectionElem::Field(x),
- ProjectionElem::TupleOrClosureField(x) => ProjectionElem::TupleOrClosureField(x),
- ProjectionElem::ConstantIndex { offset, from_end } => ProjectionElem::ConstantIndex { offset, from_end },
- ProjectionElem::Subslice { from, to } => ProjectionElem::Subslice { from, to },
- ProjectionElem::OpaqueCast(x) => ProjectionElem::OpaqueCast(x),
- ProjectionElem::Index(x) => match x { },
- }
- }).collect(),
+ ProjectionElem::Field(it) => ProjectionElem::Field(it),
+ ProjectionElem::TupleOrClosureField(it) => {
+ ProjectionElem::TupleOrClosureField(it)
+ }
+ ProjectionElem::ConstantIndex { offset, from_end } => {
+ ProjectionElem::ConstantIndex { offset, from_end }
+ }
+ ProjectionElem::Subslice { from, to } => {
+ ProjectionElem::Subslice { from, to }
+ }
+ ProjectionElem::OpaqueCast(it) => ProjectionElem::OpaqueCast(it),
+ ProjectionElem::Index(it) => match it {},
+ })
+ .collect(),
};
match &capture.kind {
CaptureKind::ByRef(bk) => {
let placeholder_subst = self.placeholder_subst();
- let tmp_ty = capture.ty.clone().substitute(Interner, &placeholder_subst);
+ let tmp_ty =
+ capture.ty.clone().substitute(Interner, &placeholder_subst);
let tmp: Place = self.temp(tmp_ty, current, capture.span)?.into();
self.push_assignment(
current,
@@ -1044,7 +1155,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
capture.span,
);
operands.push(Operand::Move(tmp));
- },
+ }
CaptureKind::ByValue => operands.push(Operand::Move(p)),
}
}
@@ -1055,18 +1166,18 @@ impl<'ctx> MirLowerCtx<'ctx> {
expr_id.into(),
);
Ok(Some(current))
- },
+ }
Expr::Tuple { exprs, is_assignee_expr: _ } => {
let Some(values) = exprs
- .iter()
- .map(|x| {
- let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else {
- return Ok(None);
- };
- current = c;
- Ok(Some(o))
- })
- .collect::<Result<Option<_>>>()?
+ .iter()
+ .map(|it| {
+ let Some((o, c)) = self.lower_expr_to_some_operand(*it, current)? else {
+ return Ok(None);
+ };
+ current = c;
+ Ok(Some(o))
+ })
+ .collect::<Result<Option<_>>>()?
else {
return Ok(None);
};
@@ -1079,7 +1190,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
Expr::Array(l) => match l {
Array::ElementList { elements, .. } => {
- let elem_ty = match &self.expr_ty_without_adjust(expr_id).data(Interner).kind {
+ let elem_ty = match &self.expr_ty_without_adjust(expr_id).kind(Interner) {
TyKind::Array(ty, _) => ty.clone(),
_ => {
return Err(MirLowerError::TypeError(
@@ -1088,30 +1199,29 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
};
let Some(values) = elements
- .iter()
- .map(|x| {
- let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else {
- return Ok(None);
- };
- current = c;
- Ok(Some(o))
- })
- .collect::<Result<Option<_>>>()?
+ .iter()
+ .map(|it| {
+ let Some((o, c)) = self.lower_expr_to_some_operand(*it, current)? else {
+ return Ok(None);
+ };
+ current = c;
+ Ok(Some(o))
+ })
+ .collect::<Result<Option<_>>>()?
else {
return Ok(None);
};
- let r = Rvalue::Aggregate(
- AggregateKind::Array(elem_ty),
- values,
- );
+ let r = Rvalue::Aggregate(AggregateKind::Array(elem_ty), values);
self.push_assignment(current, place, r, expr_id.into());
Ok(Some(current))
}
Array::Repeat { initializer, .. } => {
- let Some((init, current)) = self.lower_expr_to_some_operand(*initializer, current)? else {
+ let Some((init, current)) =
+ self.lower_expr_to_some_operand(*initializer, current)?
+ else {
return Ok(None);
};
- let len = match &self.expr_ty_without_adjust(expr_id).data(Interner).kind {
+ let len = match &self.expr_ty_without_adjust(expr_id).kind(Interner) {
TyKind::Array(_, len) => len.clone(),
_ => {
return Err(MirLowerError::TypeError(
@@ -1122,7 +1232,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let r = Rvalue::Repeat(init, len);
self.push_assignment(current, place, r, expr_id.into());
Ok(Some(current))
- },
+ }
},
Expr::Literal(l) => {
let ty = self.expr_ty_without_adjust(expr_id);
@@ -1134,9 +1244,33 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
+ fn lower_assignment(
+ &mut self,
+ current: BasicBlockId,
+ lhs: ExprId,
+ rhs: ExprId,
+ span: MirSpan,
+ ) -> Result<Option<BasicBlockId>> {
+ let Some((rhs_op, current)) =
+ self.lower_expr_to_some_operand(rhs, current)?
+ else {
+ return Ok(None);
+ };
+ if matches!(&self.body.exprs[lhs], Expr::Underscore) {
+ return Ok(Some(current));
+ }
+ let Some((lhs_place, current)) =
+ self.lower_expr_as_place(current, lhs, false)?
+ else {
+ return Ok(None);
+ };
+ self.push_assignment(current, lhs_place, rhs_op.into(), span);
+ Ok(Some(current))
+ }
+
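Example of the case the `Expr::Underscore` check above is for (user-level code, not part of the lowering): assigning to `_` evaluates the right-hand side but produces no assignment target.

fn side_effect() -> i32 {
    println!("evaluated");
    3
}

fn main() {
    _ = side_effect(); // RHS is lowered and executed; no place is created for `_`
}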
fn placeholder_subst(&mut self) -> Substitution {
let placeholder_subst = match self.owner.as_generic_def_id() {
- Some(x) => TyBuilder::placeholder_subst(self.db, x),
+ Some(it) => TyBuilder::placeholder_subst(self.db, it),
None => Substitution::empty(Interner),
};
placeholder_subst
@@ -1192,7 +1326,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result<Operand> {
let size = self
.db
- .layout_of_ty(ty.clone(), self.owner.module(self.db.upcast()).krate())?
+ .layout_of_ty(ty.clone(), self.db.trait_environment_for_body(self.owner))?
.size
.bytes_usize();
let bytes = match l {
@@ -1206,7 +1340,6 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(Operand::from_concrete_const(data, mm, ty));
}
hir_def::hir::Literal::CString(b) => {
- let b = b.as_bytes();
let bytes = b.iter().copied().chain(iter::once(0)).collect::<Vec<_>>();
let mut data = Vec::with_capacity(mem::size_of::<usize>() * 2);
@@ -1226,8 +1359,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
hir_def::hir::Literal::Char(c) => u32::from(*c).to_le_bytes().into(),
hir_def::hir::Literal::Bool(b) => vec![*b as u8],
- hir_def::hir::Literal::Int(x, _) => x.to_le_bytes()[0..size].into(),
- hir_def::hir::Literal::Uint(x, _) => x.to_le_bytes()[0..size].into(),
+ hir_def::hir::Literal::Int(it, _) => it.to_le_bytes()[0..size].into(),
+ hir_def::hir::Literal::Uint(it, _) => it.to_le_bytes()[0..size].into(),
hir_def::hir::Literal::Float(f, _) => match size {
8 => f.into_f64().to_le_bytes().into(),
4 => f.into_f32().to_le_bytes().into(),
@@ -1269,7 +1402,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
} else {
let name = const_id.name(self.db.upcast());
self.db
- .const_eval(const_id.into(), subst)
+ .const_eval(const_id.into(), subst, None)
.map_err(|e| MirLowerError::ConstEvalError(name, Box::new(e)))?
};
Ok(Operand::Constant(c))
@@ -1377,9 +1510,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
let mut ty = None;
- if let Some(x) = self.infer.expr_adjustments.get(&e) {
- if let Some(x) = x.last() {
- ty = Some(x.target.clone());
+ if let Some(it) = self.infer.expr_adjustments.get(&e) {
+ if let Some(it) = it.last() {
+ ty = Some(it.target.clone());
}
}
ty.unwrap_or_else(|| self.expr_ty_without_adjust(e))
@@ -1401,7 +1534,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn discr_temp_place(&mut self, current: BasicBlockId) -> Place {
match &self.discr_temp {
- Some(x) => x.clone(),
+ Some(it) => it.clone(),
None => {
let tmp: Place = self
.temp(TyBuilder::discr_ty(), current, MirSpan::Unknown)
@@ -1448,7 +1581,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
fn has_adjustments(&self, expr_id: ExprId) -> bool {
- !self.infer.expr_adjustments.get(&expr_id).map(|x| x.is_empty()).unwrap_or(true)
+ !self.infer.expr_adjustments.get(&expr_id).map(|it| it.is_empty()).unwrap_or(true)
}
fn merge_blocks(
@@ -1478,7 +1611,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
))?
.end
{
- Some(x) => x,
+ Some(it) => it,
None => {
let s = self.new_basic_block();
self.current_loop_blocks
@@ -1602,10 +1735,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
pick_binding: impl Fn(BindingId) -> bool,
) -> Result<BasicBlockId> {
let base_param_count = self.result.param_locals.len();
- self.result.param_locals.extend(params.clone().map(|(x, ty)| {
+ self.result.param_locals.extend(params.clone().map(|(it, ty)| {
let local_id = self.result.locals.alloc(Local { ty });
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
- if let Pat::Bind { id, subpat: None } = self.body[x] {
+ if let Pat::Bind { id, subpat: None } = self.body[it] {
if matches!(
self.body.bindings[id].mode,
BindingAnnotation::Unannotated | BindingAnnotation::Mutable
@@ -1646,7 +1779,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn binding_local(&self, b: BindingId) -> Result<LocalId> {
match self.result.binding_locals.get(b) {
- Some(x) => Ok(*x),
+ Some(it) => Ok(*it),
None => {
// FIXME: It should never happen, but currently it will happen in the `const_dependent_on_local` test, which
// is a hir lowering problem IMO.
@@ -1731,6 +1864,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
Ok(match (source_ty.kind(Interner), target_ty.kind(Interner)) {
+ (TyKind::FnDef(..), TyKind::Function(_)) => CastKind::Pointer(PointerCast::ReifyFnPointer),
(TyKind::Scalar(s), TyKind::Scalar(t)) => match (s, t) {
(chalk_ir::Scalar::Float(_), chalk_ir::Scalar::Float(_)) => CastKind::FloatToFloat,
(chalk_ir::Scalar::Float(_), _) => CastKind::FloatToInt,
@@ -1742,17 +1876,17 @@ fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
(TyKind::Raw(_, a) | TyKind::Ref(_, _, a), TyKind::Raw(_, b) | TyKind::Ref(_, _, b)) => {
CastKind::Pointer(if a == b {
PointerCast::MutToConstPointer
- } else if matches!(a.kind(Interner), TyKind::Slice(_) | TyKind::Str)
- && matches!(b.kind(Interner), TyKind::Slice(_) | TyKind::Str)
+ } else if matches!(b.kind(Interner), TyKind::Slice(_))
+ && matches!(a.kind(Interner), TyKind::Array(_, _))
+ || matches!(b.kind(Interner), TyKind::Dyn(_))
{
- // slice to slice cast is no-op (metadata is not touched), so we use this
- PointerCast::MutToConstPointer
- } else if matches!(b.kind(Interner), TyKind::Slice(_) | TyKind::Dyn(_)) {
PointerCast::Unsize
} else if matches!(a.kind(Interner), TyKind::Slice(s) if s == b) {
PointerCast::ArrayToPointer
} else {
- // cast between two sized pointer, like *const i32 to *const i8. There is no specific variant
+ // cast between two sized pointers, like *const i32 to *const i8, or two unsized pointers, like
+ // slice to slice or slice to str. These are no-ops (even in the unsized case, no metadata
+ // will be touched), but there is no specific variant
// for it in `PointerCast` so we use `MutToConstPointer`
PointerCast::MutToConstPointer
})
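Source-level examples of the cast kinds distinguished above (illustrative; the comments name the `PointerCast` variants the lowering picks):

fn main() {
    fn id(x: i32) -> i32 { x }
    let _reify = id as fn(i32) -> i32;          // FnDef -> fn pointer: ReifyFnPointer
    let arr = [1u8, 2, 3];
    let _unsize: &[u8] = &arr;                  // array -> slice coercion: Unsize
    let x = 5i32;
    let _thin = &x as *const i32 as *const i8;  // sized-to-sized pointer cast: MutToConstPointer reused
    let _fat = "hi" as *const str;              // unsized-to-unsized: metadata kept, also MutToConstPointer
}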
@@ -1796,7 +1930,7 @@ pub fn mir_body_for_closure_query(
implementation_error!("closure has not callable sig");
};
let current = ctx.lower_params_and_bindings(
- args.iter().zip(sig.params().iter()).map(|(x, y)| (*x, y.clone())),
+ args.iter().zip(sig.params().iter()).map(|(it, y)| (*it, y.clone())),
|_| true,
)?;
if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? {
@@ -1815,34 +1949,35 @@ pub fn mir_body_for_closure_query(
FnTrait::FnMut | FnTrait::Fn => vec![ProjectionElem::Deref],
};
ctx.result.walk_places(|p| {
- if let Some(x) = upvar_map.get(&p.local) {
- let r = x.iter().find(|x| {
- if p.projection.len() < x.0.place.projections.len() {
+ if let Some(it) = upvar_map.get(&p.local) {
+ let r = it.iter().find(|it| {
+ if p.projection.len() < it.0.place.projections.len() {
return false;
}
- for (x, y) in p.projection.iter().zip(x.0.place.projections.iter()) {
- match (x, y) {
+ for (it, y) in p.projection.iter().zip(it.0.place.projections.iter()) {
+ match (it, y) {
(ProjectionElem::Deref, ProjectionElem::Deref) => (),
- (ProjectionElem::Field(x), ProjectionElem::Field(y)) if x == y => (),
+ (ProjectionElem::Field(it), ProjectionElem::Field(y)) if it == y => (),
(
- ProjectionElem::TupleOrClosureField(x),
+ ProjectionElem::TupleOrClosureField(it),
ProjectionElem::TupleOrClosureField(y),
- ) if x == y => (),
+ ) if it == y => (),
_ => return false,
}
}
true
});
match r {
- Some(x) => {
+ Some(it) => {
p.local = closure_local;
let mut next_projs = closure_projection.clone();
- next_projs.push(PlaceElem::TupleOrClosureField(x.1));
+ next_projs.push(PlaceElem::TupleOrClosureField(it.1));
let prev_projs = mem::take(&mut p.projection);
- if x.0.kind != CaptureKind::ByValue {
+ if it.0.kind != CaptureKind::ByValue {
next_projs.push(ProjectionElem::Deref);
}
- next_projs.extend(prev_projs.iter().cloned().skip(x.0.place.projections.len()));
+ next_projs
+ .extend(prev_projs.iter().cloned().skip(it.0.place.projections.len()));
p.projection = next_projs.into();
}
None => err = Some(p.clone()),
@@ -1902,8 +2037,8 @@ pub fn lower_to_mir(
// need to take this input explicitly.
root_expr: ExprId,
) -> Result<MirBody> {
- if let Some((_, x)) = infer.type_mismatches().next() {
- return Err(MirLowerError::TypeMismatch(x.clone()));
+ if let Some((_, it)) = infer.type_mismatches().next() {
+ return Err(MirLowerError::TypeMismatch(it.clone()));
}
let mut ctx = MirLowerCtx::new(db, owner, body, infer);
// 0 is return local
@@ -1929,7 +2064,7 @@ pub fn lower_to_mir(
body.params
.iter()
.zip(callable_sig.params().iter())
- .map(|(x, y)| (*x, y.clone())),
+ .map(|(it, y)| (*it, y.clone())),
binding_picker,
)?;
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
index d2c8d9a08..213f151ab 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
@@ -1,12 +1,12 @@
//! MIR lowering for places
use super::*;
-use hir_def::{lang_item::lang_attr, FunctionId};
+use hir_def::FunctionId;
use hir_expand::name;
macro_rules! not_supported {
- ($x: expr) => {
- return Err(MirLowerError::NotSupported(format!($x)))
+ ($it: expr) => {
+ return Err(MirLowerError::NotSupported(format!($it)))
};
}
@@ -18,7 +18,9 @@ impl MirLowerCtx<'_> {
) -> Result<Option<(Place, BasicBlockId)>> {
let ty = self.expr_ty_without_adjust(expr_id);
let place = self.temp(ty, prev_block, expr_id.into())?;
- let Some(current) = self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)? else {
+ let Some(current) =
+ self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)?
+ else {
return Ok(None);
};
Ok(Some((place.into(), current)))
@@ -32,10 +34,12 @@ impl MirLowerCtx<'_> {
) -> Result<Option<(Place, BasicBlockId)>> {
let ty = adjustments
.last()
- .map(|x| x.target.clone())
+ .map(|it| it.target.clone())
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id));
let place = self.temp(ty, prev_block, expr_id.into())?;
- let Some(current) = self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)? else {
+ let Some(current) =
+ self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)?
+ else {
return Ok(None);
};
Ok(Some((place.into(), current)))
@@ -57,16 +61,17 @@ impl MirLowerCtx<'_> {
if let Some((last, rest)) = adjustments.split_last() {
match last.kind {
Adjust::Deref(None) => {
- let Some(mut x) = self.lower_expr_as_place_with_adjust(
+ let Some(mut it) = self.lower_expr_as_place_with_adjust(
current,
expr_id,
upgrade_rvalue,
rest,
- )? else {
+ )?
+ else {
return Ok(None);
};
- x.0 = x.0.project(ProjectionElem::Deref);
- Ok(Some(x))
+ it.0 = it.0.project(ProjectionElem::Deref);
+ Ok(Some(it))
}
Adjust::Deref(Some(od)) => {
let Some((r, current)) = self.lower_expr_as_place_with_adjust(
@@ -74,14 +79,15 @@ impl MirLowerCtx<'_> {
expr_id,
upgrade_rvalue,
rest,
- )? else {
+ )?
+ else {
return Ok(None);
};
self.lower_overloaded_deref(
current,
r,
rest.last()
- .map(|x| x.target.clone())
+ .map(|it| it.target.clone())
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)),
last.target.clone(),
expr_id.into(),
@@ -156,7 +162,7 @@ impl MirLowerCtx<'_> {
let is_builtin = match self.expr_ty_without_adjust(*expr).kind(Interner) {
TyKind::Ref(..) | TyKind::Raw(..) => true,
TyKind::Adt(id, _) => {
- if let Some(lang_item) = lang_attr(self.db.upcast(), id.0) {
+ if let Some(lang_item) = self.db.lang_attr(id.0.into()) {
lang_item == LangItem::OwnedBox
} else {
false
@@ -165,7 +171,8 @@ impl MirLowerCtx<'_> {
_ => false,
};
if !is_builtin {
- let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)?
+ else {
return Ok(None);
};
return self.lower_overloaded_deref(
@@ -192,7 +199,8 @@ impl MirLowerCtx<'_> {
},
);
}
- let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)?
+ else {
return Ok(None);
};
r = r.project(ProjectionElem::Deref);
@@ -217,12 +225,18 @@ impl MirLowerCtx<'_> {
)
{
let Some(index_fn) = self.infer.method_resolution(expr_id) else {
- return Err(MirLowerError::UnresolvedMethod("[overloaded index]".to_string()));
+ return Err(MirLowerError::UnresolvedMethod(
+ "[overloaded index]".to_string(),
+ ));
};
- let Some((base_place, current)) = self.lower_expr_as_place(current, *base, true)? else {
+ let Some((base_place, current)) =
+ self.lower_expr_as_place(current, *base, true)?
+ else {
return Ok(None);
};
- let Some((index_operand, current)) = self.lower_expr_to_some_operand(*index, current)? else {
+ let Some((index_operand, current)) =
+ self.lower_expr_to_some_operand(*index, current)?
+ else {
return Ok(None);
};
return self.lower_overloaded_index(
@@ -239,8 +253,8 @@ impl MirLowerCtx<'_> {
.infer
.expr_adjustments
.get(base)
- .and_then(|x| x.split_last())
- .map(|x| x.1)
+ .and_then(|it| it.split_last())
+ .map(|it| it.1)
.unwrap_or(&[]);
let Some((mut p_base, current)) =
self.lower_expr_as_place_with_adjust(current, *base, true, adjusts)?
@@ -249,7 +263,8 @@ impl MirLowerCtx<'_> {
};
let l_index =
self.temp(self.expr_ty_after_adjustments(*index), current, expr_id.into())?;
- let Some(current) = self.lower_expr_to_place(*index, l_index.into(), current)? else {
+ let Some(current) = self.lower_expr_to_place(*index, l_index.into(), current)?
+ else {
return Ok(None);
};
p_base = p_base.project(ProjectionElem::Index(l_index));
@@ -282,7 +297,15 @@ impl MirLowerCtx<'_> {
)
.intern(Interner),
);
- let Some(current) = self.lower_call(index_fn_op, Box::new([Operand::Copy(place), index_operand]), result.clone(), current, false, span)? else {
+ let Some(current) = self.lower_call(
+ index_fn_op,
+ Box::new([Operand::Copy(place), index_operand]),
+ result.clone(),
+ current,
+ false,
+ span,
+ )?
+ else {
return Ok(None);
};
result = result.project(ProjectionElem::Deref);
@@ -329,7 +352,15 @@ impl MirLowerCtx<'_> {
.intern(Interner),
);
let mut result: Place = self.temp(target_ty_ref, current, span)?.into();
- let Some(current) = self.lower_call(deref_fn_op, Box::new([Operand::Copy(ref_place)]), result.clone(), current, false, span)? else {
+ let Some(current) = self.lower_call(
+ deref_fn_op,
+ Box::new([Operand::Copy(ref_place)]),
+ result.clone(),
+ current,
+ false,
+ span,
+ )?
+ else {
return Ok(None);
};
result = result.project(ProjectionElem::Deref);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
index ff43c64a9..3354cbd76 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
@@ -307,6 +307,11 @@ impl MirLowerCtx<'_> {
mode,
)?,
None => {
+ // The path is not a variant, so it is a const
+ if mode != MatchingMode::Check {
+ // A const don't bind anything. Only needs check.
+ return Ok((current, current_else));
+ }
let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
let resolver = self.owner.resolver(self.db.upcast());
let pr = resolver
@@ -362,8 +367,8 @@ impl MirLowerCtx<'_> {
},
Pat::Lit(l) => match &self.body.exprs[*l] {
Expr::Literal(l) => {
- let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?;
if mode == MatchingMode::Check {
+ let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?;
self.pattern_match_const(current_else, current, c, cond_place, pattern)?
} else {
(current, current_else)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
index ce3f7a8e5..c565228d9 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
@@ -13,15 +13,14 @@ use chalk_ir::{
fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
ConstData, DebruijnIndex,
};
-use hir_def::{DefWithBodyId, GeneralConstId};
+use hir_def::DefWithBodyId;
use triomphe::Arc;
use crate::{
- consteval::unknown_const,
+ consteval::{intern_const_scalar, unknown_const},
db::HirDatabase,
from_placeholder_idx,
infer::normalize,
- method_resolution::lookup_impl_const,
utils::{generics, Generics},
ClosureId, Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
};
@@ -29,8 +28,8 @@ use crate::{
use super::{MirBody, MirLowerError, Operand, Rvalue, StatementKind, TerminatorKind};
macro_rules! not_supported {
- ($x: expr) => {
- return Err(MirLowerError::NotSupported(format!($x)))
+ ($it: expr) => {
+ return Err(MirLowerError::NotSupported(format!($it)))
};
}
@@ -97,16 +96,16 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
idx: chalk_ir::PlaceholderIndex,
_outer_binder: DebruijnIndex,
) -> std::result::Result<chalk_ir::Const<Interner>, Self::Error> {
- let x = from_placeholder_idx(self.db, idx);
- let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else {
+ let it = from_placeholder_idx(self.db, idx);
+ let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else {
not_supported!("missing idx in generics");
};
Ok(self
.subst
.as_slice(Interner)
.get(idx)
- .and_then(|x| x.constant(Interner))
- .ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))?
+ .and_then(|it| it.constant(Interner))
+ .ok_or_else(|| MirLowerError::GenericArgNotProvided(it, self.subst.clone()))?
.clone())
}
@@ -115,16 +114,16 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
idx: chalk_ir::PlaceholderIndex,
_outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
- let x = from_placeholder_idx(self.db, idx);
- let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else {
+ let it = from_placeholder_idx(self.db, idx);
+ let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else {
not_supported!("missing idx in generics");
};
Ok(self
.subst
.as_slice(Interner)
.get(idx)
- .and_then(|x| x.ty(Interner))
- .ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))?
+ .and_then(|it| it.ty(Interner))
+ .ok_or_else(|| MirLowerError::GenericArgNotProvided(it, self.subst.clone()))?
.clone())
}
@@ -180,7 +179,7 @@ impl Filler<'_> {
MirLowerError::GenericArgNotProvided(
self.generics
.as_ref()
- .and_then(|x| x.iter().nth(b.index))
+ .and_then(|it| it.iter().nth(b.index))
.unwrap()
.0,
self.subst.clone(),
@@ -193,25 +192,12 @@ impl Filler<'_> {
| chalk_ir::ConstValue::Placeholder(_) => {}
chalk_ir::ConstValue::Concrete(cc) => match &cc.interned {
crate::ConstScalar::UnevaluatedConst(const_id, subst) => {
- let mut const_id = *const_id;
let mut subst = subst.clone();
self.fill_subst(&mut subst)?;
- if let GeneralConstId::ConstId(c) = const_id {
- let (c, s) = lookup_impl_const(
- self.db,
- self.db.trait_environment_for_body(self.owner),
- c,
- subst,
- );
- const_id = GeneralConstId::ConstId(c);
- subst = s;
- }
- let result =
- self.db.const_eval(const_id.into(), subst).map_err(|e| {
- let name = const_id.name(self.db.upcast());
- MirLowerError::ConstEvalError(name, Box::new(e))
- })?;
- *c = result;
+ *c = intern_const_scalar(
+ crate::ConstScalar::UnevaluatedConst(*const_id, subst),
+ c.data(Interner).ty.clone(),
+ );
}
crate::ConstScalar::Bytes(_, _) | crate::ConstScalar::Unknown => (),
},
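
The monomorphization change above stops evaluating `UnevaluatedConst` eagerly and instead re-interns it with the filled-in substitution, leaving evaluation to a later stage. A rough standalone analogue of why substituting is enough to make progress (plain Rust, not the rust-analyzer data model):

    trait HasSize {
        const SIZE: usize;
    }

    struct A;
    struct B;

    impl HasSize for A { const SIZE: usize = 1; }
    impl HasSize for B { const SIZE: usize = 8; }

    // Inside a generic body, `T::SIZE` is effectively an unevaluated const: it can
    // only be computed once `T` is substituted, and substituting the generic
    // arguments is all the monomorphization pass needs to do up front.
    fn double_size<T: HasSize>() -> usize {
        T::SIZE * 2
    }

    fn main() {
        assert_eq!(double_size::<A>(), 2);
        assert_eq!(double_size::<B>(), 16);
    }
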
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
index ac23e77bd..781ffaeca 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
@@ -135,7 +135,7 @@ impl<'a> MirPrettyCtx<'a> {
fn for_closure(&mut self, closure: ClosureId) {
let body = match self.db.mir_body_for_closure(closure) {
- Ok(x) => x,
+ Ok(it) => it,
Err(e) => {
wln!(self, "// error in {closure:?}: {e:?}");
return;
@@ -145,7 +145,7 @@ impl<'a> MirPrettyCtx<'a> {
let indent = mem::take(&mut self.indent);
let mut ctx = MirPrettyCtx {
body: &body,
- local_to_binding: body.binding_locals.iter().map(|(x, y)| (*y, x)).collect(),
+ local_to_binding: body.binding_locals.iter().map(|(it, y)| (*y, it)).collect(),
result,
indent,
..*self
@@ -167,7 +167,7 @@ impl<'a> MirPrettyCtx<'a> {
}
fn new(body: &'a MirBody, hir_body: &'a Body, db: &'a dyn HirDatabase) -> Self {
- let local_to_binding = body.binding_locals.iter().map(|(x, y)| (*y, x)).collect();
+ let local_to_binding = body.binding_locals.iter().map(|(it, y)| (*y, it)).collect();
MirPrettyCtx {
body,
db,
@@ -315,17 +315,17 @@ impl<'a> MirPrettyCtx<'a> {
}
}
}
- ProjectionElem::TupleOrClosureField(x) => {
+ ProjectionElem::TupleOrClosureField(it) => {
f(this, local, head);
- w!(this, ".{}", x);
+ w!(this, ".{}", it);
}
ProjectionElem::Index(l) => {
f(this, local, head);
w!(this, "[{}]", this.local_name(*l).display(this.db));
}
- x => {
+ it => {
f(this, local, head);
- w!(this, ".{:?}", x);
+ w!(this, ".{:?}", it);
}
}
}
@@ -356,14 +356,14 @@ impl<'a> MirPrettyCtx<'a> {
}
self.place(p);
}
- Rvalue::Aggregate(AggregateKind::Tuple(_), x) => {
+ Rvalue::Aggregate(AggregateKind::Tuple(_), it) => {
w!(self, "(");
- self.operand_list(x);
+ self.operand_list(it);
w!(self, ")");
}
- Rvalue::Aggregate(AggregateKind::Array(_), x) => {
+ Rvalue::Aggregate(AggregateKind::Array(_), it) => {
w!(self, "[");
- self.operand_list(x);
+ self.operand_list(it);
w!(self, "]");
}
Rvalue::Repeat(op, len) => {
@@ -371,19 +371,19 @@ impl<'a> MirPrettyCtx<'a> {
self.operand(op);
w!(self, "; {}]", len.display(self.db));
}
- Rvalue::Aggregate(AggregateKind::Adt(_, _), x) => {
+ Rvalue::Aggregate(AggregateKind::Adt(_, _), it) => {
w!(self, "Adt(");
- self.operand_list(x);
+ self.operand_list(it);
w!(self, ")");
}
- Rvalue::Aggregate(AggregateKind::Closure(_), x) => {
+ Rvalue::Aggregate(AggregateKind::Closure(_), it) => {
w!(self, "Closure(");
- self.operand_list(x);
+ self.operand_list(it);
w!(self, ")");
}
- Rvalue::Aggregate(AggregateKind::Union(_, _), x) => {
+ Rvalue::Aggregate(AggregateKind::Union(_, _), it) => {
w!(self, "Union(");
- self.operand_list(x);
+ self.operand_list(it);
w!(self, ")");
}
Rvalue::Len(p) => {
@@ -428,8 +428,8 @@ impl<'a> MirPrettyCtx<'a> {
}
}
- fn operand_list(&mut self, x: &[Operand]) {
- let mut it = x.iter();
+ fn operand_list(&mut self, it: &[Operand]) {
+ let mut it = it.iter();
if let Some(first) = it.next() {
self.operand(first);
for op in it {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
index 857141280..d22d0d85c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
@@ -30,7 +30,7 @@ use syntax::{
ast::{self, AstNode, HasName},
SyntaxNode,
};
-use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};
+use tracing_subscriber::{layer::SubscriberExt, Registry};
use tracing_tree::HierarchicalLayer;
use triomphe::Arc;
@@ -52,7 +52,8 @@ fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> {
return None;
}
- let filter = EnvFilter::from_env("CHALK_DEBUG");
+ let filter: tracing_subscriber::filter::Targets =
+ env::var("CHALK_DEBUG").ok().and_then(|it| it.parse().ok()).unwrap_or_default();
let layer = HierarchicalLayer::default()
.with_indent_lines(true)
.with_ansi(false)
@@ -205,7 +206,9 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
let Some(node) = (match expr_or_pat {
hir_def::hir::ExprOrPatId::ExprId(expr) => expr_node(&body_source_map, expr, &db),
hir_def::hir::ExprOrPatId::PatId(pat) => pat_node(&body_source_map, pat, &db),
- }) else { continue; };
+ }) else {
+ continue;
+ };
let range = node.as_ref().original_file_range(&db);
let actual = format!(
"expected {}, got {}",
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs
index 425432479..e75b037e3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs
@@ -227,3 +227,22 @@ fn f(a: impl Foo<i8, Assoc<i16> = i32>) {
"#,
);
}
+
+#[test]
+fn fn_def_is_shown_as_fn_ptr() {
+ check_types_source_code(
+ r#"
+fn foo(_: i32) -> i64 { 42 }
+struct S<T>(T);
+enum E { A(usize) }
+fn test() {
+ let f = foo;
+ //^ fn(i32) -> i64
+ let f = S::<i8>;
+ //^ fn(i8) -> S<i8>
+ let f = E::A;
+ //^ fn(usize) -> E
+}
+"#,
+ );
+}
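
The new test checks that function definitions, including tuple-struct and enum-variant constructors, are displayed with their function-pointer signature. The language behaviour it reflects, namely that each of these items has a unique zero-sized fn-item type that coerces to a fn pointer, can be seen in plain Rust:

    fn foo(x: i32) -> i64 { x as i64 }
    struct S<T>(T);
    enum E { A(usize) }

    fn main() {
        // The fn-item types are distinct and unnameable, but all coerce to the
        // fn-pointer types shown in the test's hover annotations.
        let f: fn(i32) -> i64 = foo;
        let g: fn(i8) -> S<i8> = S::<i8>;
        let h: fn(usize) -> E = E::A;
        let (_a, _b, _c) = (f(1), g(2), h(3));
    }
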
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
index 111ac0b61..1e6e946a1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
@@ -202,13 +202,15 @@ fn expr_macro_def_expanded_in_various_places() {
100..119 'for _ ...!() {}': IntoIterator::IntoIter<isize>
100..119 'for _ ...!() {}': &mut IntoIterator::IntoIter<isize>
100..119 'for _ ...!() {}': fn next<IntoIterator::IntoIter<isize>>(&mut IntoIterator::IntoIter<isize>) -> Option<<IntoIterator::IntoIter<isize> as Iterator>::Item>
- 100..119 'for _ ...!() {}': Option<Iterator::Item<IntoIterator::IntoIter<isize>>>
+ 100..119 'for _ ...!() {}': Option<IntoIterator::Item<isize>>
100..119 'for _ ...!() {}': ()
100..119 'for _ ...!() {}': ()
100..119 'for _ ...!() {}': ()
- 104..105 '_': Iterator::Item<IntoIterator::IntoIter<isize>>
+ 104..105 '_': IntoIterator::Item<isize>
117..119 '{}': ()
124..134 '|| spam!()': impl Fn() -> isize
+ 140..156 'while ...!() {}': !
+ 140..156 'while ...!() {}': ()
140..156 'while ...!() {}': ()
154..156 '{}': ()
161..174 'break spam!()': !
@@ -293,13 +295,15 @@ fn expr_macro_rules_expanded_in_various_places() {
114..133 'for _ ...!() {}': IntoIterator::IntoIter<isize>
114..133 'for _ ...!() {}': &mut IntoIterator::IntoIter<isize>
114..133 'for _ ...!() {}': fn next<IntoIterator::IntoIter<isize>>(&mut IntoIterator::IntoIter<isize>) -> Option<<IntoIterator::IntoIter<isize> as Iterator>::Item>
- 114..133 'for _ ...!() {}': Option<Iterator::Item<IntoIterator::IntoIter<isize>>>
+ 114..133 'for _ ...!() {}': Option<IntoIterator::Item<isize>>
114..133 'for _ ...!() {}': ()
114..133 'for _ ...!() {}': ()
114..133 'for _ ...!() {}': ()
- 118..119 '_': Iterator::Item<IntoIterator::IntoIter<isize>>
+ 118..119 '_': IntoIterator::Item<isize>
131..133 '{}': ()
138..148 '|| spam!()': impl Fn() -> isize
+ 154..170 'while ...!() {}': !
+ 154..170 'while ...!() {}': ()
154..170 'while ...!() {}': ()
168..170 '{}': ()
175..188 'break spam!()': !
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
index 1e57a4ae2..c837fae3f 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
@@ -1216,6 +1216,73 @@ fn main() {
}
#[test]
+fn inherent_method_deref_raw() {
+ check_types(
+ r#"
+struct Val;
+
+impl Val {
+ pub fn method(self: *const Val) -> u32 {
+ 0
+ }
+}
+
+fn main() {
+ let foo: *const Val;
+ foo.method();
+ // ^^^^^^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn inherent_method_ref_self_deref_raw() {
+ check_types(
+ r#"
+struct Val;
+
+impl Val {
+ pub fn method(&self) -> u32 {
+ 0
+ }
+}
+
+fn main() {
+ let foo: *const Val;
+ foo.method();
+ // ^^^^^^^^^^^^ {unknown}
+}
+"#,
+ );
+}
+
+#[test]
+fn trait_method_deref_raw() {
+ check_types(
+ r#"
+trait Trait {
+ fn method(self: *const Self) -> u32;
+}
+
+struct Val;
+
+impl Trait for Val {
+ fn method(self: *const Self) -> u32 {
+ 0
+ }
+}
+
+fn main() {
+ let foo: *const Val;
+ foo.method();
+ // ^^^^^^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
fn method_on_dyn_impl() {
check_types(
r#"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs
index 59046c043..5d809b823 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs
@@ -412,17 +412,23 @@ fn diverging_expression_3_break() {
355..654 '{ ...; }; }': ()
398..399 'x': u32
407..433 '{ whil...; }; }': u32
+ 409..430 'while ...eak; }': !
+ 409..430 'while ...eak; }': ()
409..430 'while ...eak; }': ()
415..419 'true': bool
420..430 '{ break; }': ()
422..427 'break': !
537..538 'x': u32
546..564 '{ whil... {}; }': u32
+ 548..561 'while true {}': !
+ 548..561 'while true {}': ()
548..561 'while true {}': ()
554..558 'true': bool
559..561 '{}': ()
615..616 'x': u32
624..651 '{ whil...; }; }': u32
+ 626..648 'while ...urn; }': !
+ 626..648 'while ...urn; }': ()
626..648 'while ...urn; }': ()
632..636 'true': bool
637..648 '{ return; }': ()
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
index 047900a32..6ea059065 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
@@ -1240,11 +1240,11 @@ fn test() {
16..66 'for _ ... }': IntoIterator::IntoIter<()>
16..66 'for _ ... }': &mut IntoIterator::IntoIter<()>
16..66 'for _ ... }': fn next<IntoIterator::IntoIter<()>>(&mut IntoIterator::IntoIter<()>) -> Option<<IntoIterator::IntoIter<()> as Iterator>::Item>
- 16..66 'for _ ... }': Option<Iterator::Item<IntoIterator::IntoIter<()>>>
+ 16..66 'for _ ... }': Option<IntoIterator::Item<()>>
16..66 'for _ ... }': ()
16..66 'for _ ... }': ()
16..66 'for _ ... }': ()
- 20..21 '_': Iterator::Item<IntoIterator::IntoIter<()>>
+ 20..21 '_': IntoIterator::Item<()>
25..39 '{ let x = 0; }': ()
31..32 'x': i32
35..36 '0': i32
@@ -1267,6 +1267,8 @@ fn test() {
"#,
expect![[r#"
10..59 '{ ... } }': ()
+ 16..57 'while ... }': !
+ 16..57 'while ... }': ()
16..57 'while ... }': ()
22..30 '{ true }': bool
24..28 'true': bool
@@ -1978,3 +1980,23 @@ fn x(a: [i32; 4]) {
"#,
);
}
+
+#[test]
+fn dont_unify_on_casts() {
+ // #15246
+ check_types(
+ r#"
+fn unify(_: [bool; 1]) {}
+fn casted(_: *const bool) {}
+fn default<T>() -> T { loop {} }
+
+fn test() {
+ let foo = default();
+ //^^^ [bool; 1]
+
+ casted(&foo as *const _);
+ unify(foo);
+}
+"#,
+ );
+}
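
The `dont_unify_on_casts` regression test (issue #15246) pins down that an `as` cast must not be used as an inference source for the value being cast. A standalone sketch of the same situation, using `Default::default()` in place of the test's diverging `default` helper so it actually runs:

    fn unify(_: [bool; 1]) {}
    fn casted(_: *const bool) {}

    fn main() {
        // `foo`'s type is only pinned down by the later `unify` call; the cast in
        // between (`&[bool; 1]` to `*const bool`) must not force `foo` to unify
        // with the cast target.
        let foo = Default::default();
        casted(&foo as *const _);
        unify(foo);
    }
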
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
index a0ff62843..2ad7946c8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
@@ -3513,7 +3513,6 @@ fn func() {
);
}
-// FIXME
#[test]
fn castable_to() {
check_infer(
@@ -3538,10 +3537,10 @@ fn func() {
120..122 '{}': ()
138..184 '{ ...0]>; }': ()
148..149 'x': Box<[i32; 0]>
- 152..160 'Box::new': fn new<[{unknown}; 0]>([{unknown}; 0]) -> Box<[{unknown}; 0]>
- 152..164 'Box::new([])': Box<[{unknown}; 0]>
+ 152..160 'Box::new': fn new<[i32; 0]>([i32; 0]) -> Box<[i32; 0]>
+ 152..164 'Box::new([])': Box<[i32; 0]>
152..181 'Box::n...2; 0]>': Box<[i32; 0]>
- 161..163 '[]': [{unknown}; 0]
+ 161..163 '[]': [i32; 0]
"#]],
);
}
@@ -3578,6 +3577,21 @@ fn f<T>(t: Ark<T>) {
}
#[test]
+fn ref_to_array_to_ptr_cast() {
+ check_types(
+ r#"
+fn default<T>() -> T { loop {} }
+fn foo() {
+ let arr = [default()];
+ //^^^ [i32; 1]
+ let ref_to_arr = &arr;
+ let casted = ref_to_arr as *const i32;
+}
+"#,
+ );
+}
+
+#[test]
fn const_dependent_on_local() {
check_types(
r#"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
index 97ae732a9..542df8b34 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -4149,6 +4149,30 @@ where
}
#[test]
+fn gats_in_bounds_for_assoc() {
+ check_types(
+ r#"
+trait Trait {
+ type Assoc: Another<Gat<i32> = usize>;
+ type Assoc2<T>: Another<Gat<T> = T>;
+}
+trait Another {
+ type Gat<T>;
+ fn foo(&self) -> Self::Gat<i32>;
+ fn bar<T>(&self) -> Self::Gat<T>;
+}
+
+fn test<T: Trait>(a: T::Assoc, b: T::Assoc2<isize>) {
+ let v = a.foo();
+ //^ usize
+ let v = b.bar::<isize>();
+ //^ isize
+}
+"#,
+ );
+}
+
+#[test]
fn bin_op_with_scalar_fallback() {
// Extra impls are significant so that chalk doesn't give us definite guidances.
check_types(
@@ -4410,3 +4434,47 @@ fn test(v: S<i32>) {
"#,
);
}
+
+#[test]
+fn associated_type_in_argument() {
+ check(
+ r#"
+ trait A {
+ fn m(&self) -> i32;
+ }
+
+ fn x<T: B>(k: &<T as B>::Ty) {
+ k.m();
+ }
+
+ struct X;
+ struct Y;
+
+ impl A for X {
+ fn m(&self) -> i32 {
+ 8
+ }
+ }
+
+ impl A for Y {
+ fn m(&self) -> i32 {
+ 32
+ }
+ }
+
+ trait B {
+ type Ty: A;
+ }
+
+ impl B for u16 {
+ type Ty = X;
+ }
+
+ fn ttt() {
+ let inp = Y;
+ x::<u16>(&inp);
+ //^^^^ expected &X, got &Y
+ }
+ "#,
+ );
+}
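
The `gats_in_bounds_for_assoc` test exercises equality bounds on generic associated types. The shape it relies on is expressible in stable Rust; a trimmed-down sketch mirroring the fixture, with no impls, just the bounds doing the normalization:

    trait Another {
        type Gat<T>;
        fn foo(&self) -> Self::Gat<i32>;
        fn bar<T>(&self) -> Self::Gat<T>;
    }

    trait Trait {
        type Assoc: Another<Gat<i32> = usize>;
        type Assoc2<T>: Another<Gat<T> = T>;
    }

    // The equality bounds on `Gat<..>` are what let these calls normalize to
    // `usize` and `isize`, which is exactly what the inference test asserts.
    fn test<T: Trait>(a: T::Assoc, b: T::Assoc2<isize>) {
        let _v: usize = a.foo();
        let _w: isize = b.bar::<isize>();
    }

    fn main() {}
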
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
index f40b7db3a..3c7cfbaed 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
@@ -170,7 +170,7 @@ fn solve(
struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase<Interner, ChalkContext<'a>>);
-impl<'a> Drop for LoggingRustIrDatabaseLoggingOnDrop<'a> {
+impl Drop for LoggingRustIrDatabaseLoggingOnDrop<'_> {
fn drop(&mut self) {
eprintln!("chalk program:\n{}", self.0);
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
index 363658063..75b8b9afa 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
@@ -28,14 +28,15 @@ use intern::Interned;
use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec};
use stdx::never;
+use triomphe::Arc;
use crate::{
consteval::unknown_const,
db::HirDatabase,
layout::{Layout, TagEncoding},
mir::pad16,
- ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitRef, TraitRefExt,
- Ty, WhereClause,
+ ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitEnvironment,
+ TraitRef, TraitRefExt, Ty, WhereClause,
};
pub(crate) fn fn_traits(
@@ -89,7 +90,7 @@ struct SuperTraits<'a> {
seen: FxHashSet<ChalkTraitId>,
}
-impl<'a> SuperTraits<'a> {
+impl SuperTraits<'_> {
fn elaborate(&mut self, trait_ref: &TraitRef) {
direct_super_trait_refs(self.db, trait_ref, |trait_ref| {
if !self.seen.contains(&trait_ref.trait_id) {
@@ -99,7 +100,7 @@ impl<'a> SuperTraits<'a> {
}
}
-impl<'a> Iterator for SuperTraits<'a> {
+impl Iterator for SuperTraits<'_> {
type Item = TraitRef;
fn next(&mut self) -> Option<Self::Item> {
@@ -417,7 +418,7 @@ impl FallibleTypeFolder<Interner> for UnevaluatedConstEvaluatorFolder<'_> {
) -> Result<Const, Self::Error> {
if let chalk_ir::ConstValue::Concrete(c) = &constant.data(Interner).value {
if let ConstScalar::UnevaluatedConst(id, subst) = &c.interned {
- if let Ok(eval) = self.db.const_eval(*id, subst.clone()) {
+ if let Ok(eval) = self.db.const_eval(*id, subst.clone(), None) {
return Ok(eval);
} else {
return Ok(unknown_const(constant.data(Interner).ty.clone()));
@@ -431,10 +432,11 @@ impl FallibleTypeFolder<Interner> for UnevaluatedConstEvaluatorFolder<'_> {
pub(crate) fn detect_variant_from_bytes<'a>(
layout: &'a Layout,
db: &dyn HirDatabase,
- krate: CrateId,
+ trait_env: Arc<TraitEnvironment>,
b: &[u8],
e: EnumId,
) -> Option<(LocalEnumVariantId, &'a Layout)> {
+ let krate = trait_env.krate;
let (var_id, var_layout) = match &layout.variants {
hir_def::layout::Variants::Single { index } => (index.0, &*layout),
hir_def::layout::Variants::Multiple { tag, tag_encoding, variants, .. } => {
diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml
index a20aff93f..f860ee948 100644
--- a/src/tools/rust-analyzer/crates/hir/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml
@@ -18,7 +18,7 @@ arrayvec = "0.7.2"
itertools = "0.10.5"
smallvec.workspace = true
triomphe.workspace = true
-once_cell = "1.17.0"
+once_cell = "1.17.1"
# local deps
base-db.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
index b81793729..0f2fb2c81 100644
--- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
@@ -12,9 +12,9 @@ use hir_ty::db::HirDatabase;
use syntax::{ast, AstNode};
use crate::{
- Adt, AssocItem, Const, ConstParam, Enum, Field, Function, GenericParam, Impl, LifetimeParam,
- Macro, Module, ModuleDef, Static, Struct, Trait, TraitAlias, TypeAlias, TypeParam, Union,
- Variant,
+ Adt, AssocItem, Const, ConstParam, Enum, ExternCrateDecl, Field, Function, GenericParam, Impl,
+ LifetimeParam, Macro, Module, ModuleDef, Static, Struct, Trait, TraitAlias, TypeAlias,
+ TypeParam, Union, Variant,
};
pub trait HasAttrs {
@@ -120,6 +120,39 @@ impl HasAttrs for AssocItem {
}
}
+impl HasAttrs for ExternCrateDecl {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+ let def = AttrDefId::ExternCrateId(self.into());
+ db.attrs_with_owner(def)
+ }
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ let crate_docs = self.resolved_crate(db)?.root_module().attrs(db).docs().map(String::from);
+ let def = AttrDefId::ExternCrateId(self.into());
+ let decl_docs = db.attrs(def).docs().map(String::from);
+ match (decl_docs, crate_docs) {
+ (None, None) => None,
+ (Some(decl_docs), None) => Some(decl_docs),
+ (None, Some(crate_docs)) => Some(crate_docs),
+ (Some(mut decl_docs), Some(crate_docs)) => {
+ decl_docs.push('\n');
+ decl_docs.push('\n');
+ decl_docs += &crate_docs;
+ Some(decl_docs)
+ }
+ }
+ .map(Documentation::new)
+ }
+ fn resolve_doc_path(
+ self,
+ db: &dyn HirDatabase,
+ link: &str,
+ ns: Option<Namespace>,
+ ) -> Option<ModuleDef> {
+ let def = AttrDefId::ExternCrateId(self.into());
+ resolve_doc_path(db, def, link, ns).map(ModuleDef::from)
+ }
+}
+
/// Resolves the item `link` points to in the scope of `def`.
fn resolve_doc_path(
db: &dyn HirDatabase,
@@ -140,7 +173,9 @@ fn resolve_doc_path(
AttrDefId::TypeAliasId(it) => it.resolver(db.upcast()),
AttrDefId::ImplId(it) => it.resolver(db.upcast()),
AttrDefId::ExternBlockId(it) => it.resolver(db.upcast()),
+ AttrDefId::UseId(it) => it.resolver(db.upcast()),
AttrDefId::MacroId(it) => it.resolver(db.upcast()),
+ AttrDefId::ExternCrateId(it) => it.resolver(db.upcast()),
AttrDefId::GenericParamId(it) => match it {
GenericParamId::TypeParamId(it) => it.parent(),
GenericParamId::ConstParamId(it) => it.parent(),
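
A standalone sketch of the doc-merging rule in the new `HasAttrs for ExternCrateDecl` impl above: docs on the `extern crate` declaration and docs from the resolved crate root are concatenated with a blank line in between.

    fn merge_docs(decl_docs: Option<String>, crate_docs: Option<String>) -> Option<String> {
        match (decl_docs, crate_docs) {
            (None, None) => None,
            (Some(decl), None) => Some(decl),
            (None, Some(krate)) => Some(krate),
            (Some(mut decl), Some(krate)) => {
                // Declaration docs first, then a blank line, then the crate docs.
                decl.push('\n');
                decl.push('\n');
                decl += &krate;
                Some(decl)
            }
        }
    }

    fn main() {
        assert_eq!(
            merge_docs(Some("decl".into()), Some("crate root".into())).as_deref(),
            Some("decl\n\ncrate root")
        );
        assert_eq!(merge_docs(None, None), None);
    }
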
diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs
index e0cde689f..936581bfe 100644
--- a/src/tools/rust-analyzer/crates/hir/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/db.rs
@@ -5,13 +5,8 @@
//! But we need this for at least LRU caching at the query level.
pub use hir_def::db::*;
pub use hir_expand::db::{
- AstIdMapQuery, ExpandDatabase, ExpandDatabaseStorage, ExpandProcMacroQuery, HygieneFrameQuery,
- InternMacroCallQuery, MacroArgTextQuery, MacroDefQuery, MacroExpandQuery,
- ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
+ AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
+ ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery,
+ MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
};
pub use hir_ty::db::*;
-
-#[test]
-fn hir_database_is_object_safe() {
- fn _assert_object_safe(_: &dyn HirDatabase) {}
-}
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
index b64d81490..80c3bcdca 100644
--- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -3,7 +3,7 @@
//!
//! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves.
-pub use hir_ty::diagnostics::{IncoherentImpl, IncorrectCase};
+pub use hir_ty::diagnostics::{CaseType, IncoherentImpl, IncorrectCase};
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs
index 9a2090ab7..9dfb98e45 100644
--- a/src/tools/rust-analyzer/crates/hir/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/display.rs
@@ -18,9 +18,9 @@ use hir_ty::{
};
use crate::{
- Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, Field, Function, GenericParam,
- HasCrate, HasVisibility, LifetimeParam, Macro, Module, Static, Struct, Trait, TraitAlias,
- TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant,
+ Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl, Field,
+ Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module, Static, Struct,
+ Trait, TraitAlias, TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant,
};
impl HirDisplay for Function {
@@ -238,6 +238,18 @@ impl HirDisplay for Type {
}
}
+impl HirDisplay for ExternCrateDecl {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ f.write_str("extern crate ")?;
+ write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
+ if let Some(alias) = self.alias(f.db) {
+ write!(f, " as {alias}",)?;
+ }
+ Ok(())
+ }
+}
+
impl HirDisplay for GenericParam {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
match self {
@@ -251,8 +263,8 @@ impl HirDisplay for GenericParam {
impl HirDisplay for TypeOrConstParam {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
match self.split(f.db) {
- either::Either::Left(x) => x.hir_fmt(f),
- either::Either::Right(x) => x.hir_fmt(f),
+ either::Either::Left(it) => it.hir_fmt(f),
+ either::Either::Right(it) => it.hir_fmt(f),
}
}
}
@@ -303,11 +315,11 @@ fn write_generic_params(
) -> Result<(), HirDisplayError> {
let params = f.db.generic_params(def);
if params.lifetimes.is_empty()
- && params.type_or_consts.iter().all(|x| x.1.const_param().is_none())
+ && params.type_or_consts.iter().all(|it| it.1.const_param().is_none())
&& params
.type_or_consts
.iter()
- .filter_map(|x| x.1.type_param())
+ .filter_map(|it| it.1.type_param())
.all(|param| !matches!(param.provenance, TypeParamProvenance::TypeParamList))
{
return Ok(());
diff --git a/src/tools/rust-analyzer/crates/hir/src/from_id.rs b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
index de2390219..fc4bbffdb 100644
--- a/src/tools/rust-analyzer/crates/hir/src/from_id.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
@@ -15,7 +15,7 @@ use crate::{
};
macro_rules! from_id {
- ($(($id:path, $ty:path)),*) => {$(
+ ($(($id:path, $ty:path)),* $(,)?) => {$(
impl From<$id> for $ty {
fn from(id: $id) -> $ty {
$ty { id }
@@ -47,7 +47,8 @@ from_id![
(hir_def::TypeParamId, crate::TypeParam),
(hir_def::ConstParamId, crate::ConstParam),
(hir_def::LifetimeParamId, crate::LifetimeParam),
- (hir_def::MacroId, crate::Macro)
+ (hir_def::MacroId, crate::Macro),
+ (hir_def::ExternCrateId, crate::ExternCrateDecl),
];
impl From<AdtId> for Adt {
diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
index 9f6b5c0a9..31cf8ba33 100644
--- a/src/tools/rust-analyzer/crates/hir/src/has_source.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
@@ -1,18 +1,19 @@
//! Provides set of implementation for hir's objects that allows get back location in file.
+use base_db::FileId;
use either::Either;
use hir_def::{
nameres::{ModuleOrigin, ModuleSource},
src::{HasChildSource, HasSource as _},
Lookup, MacroId, VariantId,
};
-use hir_expand::InFile;
+use hir_expand::{HirFileId, InFile};
use syntax::ast;
use crate::{
- db::HirDatabase, Adt, Const, Enum, Field, FieldSource, Function, Impl, LifetimeParam,
- LocalSource, Macro, Module, Static, Struct, Trait, TraitAlias, TypeAlias, TypeOrConstParam,
- Union, Variant,
+ db::HirDatabase, Adt, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl,
+ LifetimeParam, LocalSource, Macro, Module, Static, Struct, Trait, TraitAlias, TypeAlias,
+ TypeOrConstParam, Union, Variant,
};
pub trait HasSource {
@@ -20,6 +21,10 @@ pub trait HasSource {
/// Fetches the definition's source node.
/// Using [`crate::Semantics::source`] is preferred when working with [`crate::Semantics`],
/// as that caches the parsed file in the semantics' cache.
+ ///
+ /// Currently, some implementations can return `InFile` instead of `Option<InFile>`,
+ /// but this method returns `Option` so that rlibs can be supported in the future;
+ /// see https://github.com/rust-lang/rust-analyzer/issues/6913
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>>;
}
@@ -32,6 +37,11 @@ impl Module {
def_map[self.id.local_id].definition_source(db.upcast())
}
+ pub fn definition_source_file_id(self, db: &dyn HirDatabase) -> HirFileId {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].definition_source_file_id()
+ }
+
pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool {
let def_map = self.id.def_map(db.upcast());
match def_map[self.id.local_id].origin {
@@ -40,6 +50,16 @@ impl Module {
}
}
+ pub fn as_source_file_id(self, db: &dyn HirDatabase) -> Option<FileId> {
+ let def_map = self.id.def_map(db.upcast());
+ match def_map[self.id.local_id].origin {
+ ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition, .. } => {
+ Some(definition)
+ }
+ _ => None,
+ }
+ }
+
pub fn is_inline(self, db: &dyn HirDatabase) -> bool {
let def_map = self.id.def_map(db.upcast());
def_map[self.id.local_id].origin.is_inline()
@@ -187,3 +207,11 @@ impl HasSource for LocalSource {
Some(self.source)
}
}
+
+impl HasSource for ExternCrateDecl {
+ type Ast = ast::ExternCrate;
+
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index 6df625380..bf041b61f 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -47,22 +47,22 @@ use hir_def::{
lang_item::LangItemTarget,
layout::{self, ReprOptions, TargetDataLayout},
macro_id_to_def_id,
- nameres::{self, diagnostics::DefDiagnostic, ModuleOrigin},
+ nameres::{self, diagnostics::DefDiagnostic},
+ path::ImportAlias,
per_ns::PerNs,
resolver::{HasResolver, Resolver},
src::HasSource as _,
- AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
- EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, InTypeConstId, ItemContainerId,
- LifetimeParamId, LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId,
- StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId,
- UnionId,
+ AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId,
+ EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, HasModule, ImplId,
+ InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId, Lookup,
+ MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
+ TypeOrConstParamId, TypeParamId, UnionId,
};
use hir_expand::{name::name, MacroCallKind};
use hir_ty::{
all_super_traits, autoderef,
consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
diagnostics::BodyValidationDiagnostic,
- display::HexifiedConst,
layout::{Layout as TyLayout, RustcEnumVariantIdx, TagEncoding},
method_resolution::{self, TyFingerprint},
mir::{self, interpret_mir},
@@ -89,11 +89,11 @@ use crate::db::{DefDatabase, HirDatabase};
pub use crate::{
attrs::{HasAttrs, Namespace},
diagnostics::{
- AnyDiagnostic, BreakOutsideOfLoop, ExpectedFunction, InactiveCode, IncoherentImpl,
- IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError, MacroExpansionParseError,
- MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms, MissingUnsafe,
- MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem, PrivateField,
- ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel,
+ AnyDiagnostic, BreakOutsideOfLoop, CaseType, ExpectedFunction, InactiveCode,
+ IncoherentImpl, IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError,
+ MacroExpansionParseError, MalformedDerive, MismatchedArgCount, MissingFields,
+ MissingMatchArms, MissingUnsafe, MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem,
+ PrivateField, ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel,
UnimplementedBuiltinMacro, UnreachableLabel, UnresolvedExternCrate, UnresolvedField,
UnresolvedImport, UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule,
UnresolvedProcMacro, UnusedMut,
@@ -201,9 +201,8 @@ impl Crate {
db.crate_graph().transitive_rev_deps(self.id).map(|id| Crate { id })
}
- pub fn root_module(self, db: &dyn HirDatabase) -> Module {
- let def_map = db.crate_def_map(self.id);
- Module { id: def_map.crate_root().into() }
+ pub fn root_module(self) -> Module {
+ Module { id: CrateRootModuleId::from(self.id).into() }
}
pub fn modules(self, db: &dyn HirDatabase) -> Vec<Module> {
@@ -248,7 +247,7 @@ impl Crate {
/// Try to get the root URL of the documentation of a crate.
pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> {
// Look for #![doc(html_root_url = "...")]
- let attrs = db.attrs(AttrDefId::ModuleId(self.root_module(db).into()));
+ let attrs = db.attrs(AttrDefId::ModuleId(self.root_module().into()));
let doc_url = attrs.by_key("doc").find_string_value_in_tt("html_root_url");
doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
}
@@ -379,11 +378,6 @@ impl ModuleDef {
ModuleDef::BuiltinType(_) | ModuleDef::Macro(_) => return Vec::new(),
};
- let module = match self.module(db) {
- Some(it) => it,
- None => return Vec::new(),
- };
-
let mut acc = Vec::new();
match self.as_def_with_body() {
@@ -391,7 +385,7 @@ impl ModuleDef {
def.diagnostics(db, &mut acc);
}
None => {
- for diag in hir_ty::diagnostics::incorrect_case(db, module.id.krate(), id) {
+ for diag in hir_ty::diagnostics::incorrect_case(db, id) {
acc.push(diag.into())
}
}
@@ -505,15 +499,10 @@ impl Module {
/// Finds nearest non-block ancestor `Module` (`self` included).
pub fn nearest_non_block_module(self, db: &dyn HirDatabase) -> Module {
let mut id = self.id;
- loop {
- let def_map = id.def_map(db.upcast());
- let origin = def_map[id.local_id].origin;
- if matches!(origin, ModuleOrigin::BlockExpr { .. }) {
- id = id.containing_module(db.upcast()).expect("block without parent module")
- } else {
- return Module { id };
- }
+ while id.is_block_module() {
+ id = id.containing_module(db.upcast()).expect("block without parent module");
}
+ Module { id }
}
pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec<Module> {
@@ -619,15 +608,21 @@ impl Module {
let inherent_impls = db.inherent_impls_in_crate(self.id.krate());
for impl_def in self.impl_defs(db) {
+ let loc = impl_def.id.lookup(db.upcast());
+ let tree = loc.id.item_tree(db.upcast());
+ let node = &tree[loc.id.value];
+ let file_id = loc.id.file_id();
+ if file_id.is_builtin_derive(db.upcast()) {
+ // these expansions come from us, so diagnosing them is a waste of resources
+ // FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow
+ continue;
+ }
+
for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
}
if inherent_impls.invalid_impls().contains(&impl_def.id) {
- let loc = impl_def.id.lookup(db.upcast());
- let tree = loc.id.item_tree(db.upcast());
- let node = &tree[loc.id.value];
- let file_id = loc.id.file_id();
let ast_id_map = db.ast_id_map(file_id);
acc.push(IncoherentImpl { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
@@ -698,16 +693,18 @@ impl Module {
fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, m: Macro) {
let id = macro_id_to_def_id(db.upcast(), m.id);
- if let Err(e) = db.macro_def(id) {
- let Some(ast) = id.ast_id().left() else {
- never!("MacroDefError for proc-macro: {:?}", e);
+ if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) {
+ if let Some(e) = expander.mac.err() {
+ let Some(ast) = id.ast_id().left() else {
+ never!("declarative expander for non decl-macro: {:?}", e);
return;
};
- emit_def_diagnostic_(
- db,
- acc,
- &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() },
- );
+ emit_def_diagnostic_(
+ db,
+ acc,
+ &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() },
+ );
+ }
}
}
@@ -753,7 +750,7 @@ fn emit_def_diagnostic_(
let item = ast.to_node(db.upcast());
acc.push(
InactiveCode {
- node: ast.with_value(AstPtr::new(&item).into()),
+ node: ast.with_value(SyntaxNodePtr::new(&item).into()),
cfg: cfg.clone(),
opts: opts.clone(),
}
@@ -963,8 +960,15 @@ impl Field {
}
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
- db.layout_of_ty(self.ty(db).ty.clone(), self.parent.module(db).krate().into())
- .map(|layout| Layout(layout, db.target_data_layout(self.krate(db).into()).unwrap()))
+ db.layout_of_ty(
+ self.ty(db).ty.clone(),
+ db.trait_environment(match hir_def::VariantId::from(self.parent) {
+ hir_def::VariantId::EnumVariantId(id) => GenericDefId::EnumVariantId(id),
+ hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()),
+ hir_def::VariantId::UnionId(id) => GenericDefId::AdtId(id.into()),
+ }),
+ )
+ .map(|layout| Layout(layout, db.target_data_layout(self.krate(db).into()).unwrap()))
}
pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef {
@@ -1234,7 +1238,7 @@ impl Adt {
pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
let subst = db.generic_defaults(self.into());
subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
- GenericArgData::Ty(x) => x.is_unknown(),
+ GenericArgData::Ty(it) => it.is_unknown(),
_ => false,
})
}
@@ -1244,8 +1248,12 @@ impl Adt {
return Err(LayoutError::HasPlaceholder);
}
let krate = self.krate(db).id;
- db.layout_of_adt(self.into(), Substitution::empty(Interner), krate)
- .map(|layout| Layout(layout, db.target_data_layout(krate).unwrap()))
+ db.layout_of_adt(
+ self.into(),
+ Substitution::empty(Interner),
+ db.trait_environment(self.into()),
+ )
+ .map(|layout| Layout(layout, db.target_data_layout(krate).unwrap()))
}
/// Turns this ADT into a type. Any type parameters of the ADT will be
@@ -1635,11 +1643,11 @@ impl DefWithBody {
for moof in &borrowck_result.moved_out_of_ref {
let span: InFile<SyntaxNodePtr> = match moof.span {
mir::MirSpan::ExprId(e) => match source_map.expr_syntax(e) {
- Ok(s) => s.map(|x| x.into()),
+ Ok(s) => s.map(|it| it.into()),
Err(_) => continue,
},
mir::MirSpan::PatId(p) => match source_map.pat_syntax(p) {
- Ok(s) => s.map(|x| match x {
+ Ok(s) => s.map(|it| match it {
Either::Left(e) => e.into(),
Either::Right(e) => e.into(),
}),
@@ -1661,6 +1669,14 @@ impl DefWithBody {
let Some(&local) = mir_body.binding_locals.get(binding_id) else {
continue;
};
+ if body[binding_id]
+ .definitions
+ .iter()
+ .any(|&pat| source_map.pat_syntax(pat).is_err())
+ {
+ // Skip synthetic bindings
+ continue;
+ }
let need_mut = &mol[local];
let local = Local { parent: self.into(), binding_id };
match (need_mut, local.is_mut(db)) {
@@ -1670,11 +1686,11 @@ impl DefWithBody {
for span in spans {
let span: InFile<SyntaxNodePtr> = match span {
mir::MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {
- Ok(s) => s.map(|x| x.into()),
+ Ok(s) => s.map(|it| it.into()),
Err(_) => continue,
},
mir::MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
- Ok(s) => s.map(|x| match x {
+ Ok(s) => s.map(|it| match it {
Either::Left(e) => e.into(),
Either::Right(e) => e.into(),
}),
@@ -1687,7 +1703,7 @@ impl DefWithBody {
}
(mir::MutabilityReason::Not, true) => {
if !infer.mutated_bindings_in_closure.contains(&binding_id) {
- let should_ignore = matches!(body[binding_id].name.as_str(), Some(x) if x.starts_with("_"));
+ let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_"));
if !should_ignore {
acc.push(UnusedMut { local }.into())
}
@@ -1810,7 +1826,7 @@ impl DefWithBody {
// FIXME: don't ignore diagnostics for in type const
DefWithBody::InTypeConst(_) => return,
};
- for diag in hir_ty::diagnostics::incorrect_case(db, krate, def.into()) {
+ for diag in hir_ty::diagnostics::incorrect_case(db, def.into()) {
acc.push(diag.into())
}
}
@@ -1919,6 +1935,21 @@ impl Function {
db.function_data(self.id).has_async_kw()
}
+ /// Does this function have `#[test]` attribute?
+ pub fn is_test(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).attrs.is_test()
+ }
+
+ /// Does this function have the ignore attribute?
+ pub fn is_ignore(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).attrs.is_ignore()
+ }
+
+ /// Does this function have `#[bench]` attribute?
+ pub fn is_bench(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).attrs.is_bench()
+ }
+
pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool {
hir_ty::is_fn_unsafe_to_call(db, self.id)
}
@@ -1962,7 +1993,7 @@ impl Function {
return r;
}
};
- let (result, stdout, stderr) = interpret_mir(db, &body, false);
+ let (result, stdout, stderr) = interpret_mir(db, body, false, None);
let mut text = match result {
Ok(_) => "pass".to_string(),
Err(e) => {
@@ -2098,6 +2129,47 @@ impl HasVisibility for Function {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ExternCrateDecl {
+ pub(crate) id: ExternCrateId,
+}
+
+impl ExternCrateDecl {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.module(db.upcast()).into()
+ }
+
+ pub fn resolved_crate(self, db: &dyn HirDatabase) -> Option<Crate> {
+ db.extern_crate_decl_data(self.id).crate_id.map(Into::into)
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.extern_crate_decl_data(self.id).name.clone()
+ }
+
+ pub fn alias(self, db: &dyn HirDatabase) -> Option<ImportAlias> {
+ db.extern_crate_decl_data(self.id).alias.clone()
+ }
+
+ /// Returns the name under which this crate is made accessible, taking `_` into account.
+ pub fn alias_or_name(self, db: &dyn HirDatabase) -> Option<Name> {
+ let extern_crate_decl_data = db.extern_crate_decl_data(self.id);
+ match &extern_crate_decl_data.alias {
+ Some(ImportAlias::Underscore) => None,
+ Some(ImportAlias::Alias(alias)) => Some(alias.clone()),
+ None => Some(extern_crate_decl_data.name.clone()),
+ }
+ }
+}
+
+impl HasVisibility for ExternCrateDecl {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.extern_crate_decl_data(self.id)
+ .visibility
+ .resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InTypeConst {
pub(crate) id: InTypeConstId,
}
@@ -2131,8 +2203,28 @@ impl Const {
}
pub fn render_eval(self, db: &dyn HirDatabase) -> Result<String, ConstEvalError> {
- let c = db.const_eval(self.id.into(), Substitution::empty(Interner))?;
- let r = format!("{}", HexifiedConst(c).display(db));
+ let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?;
+ let data = &c.data(Interner);
+ if let TyKind::Scalar(s) = data.ty.kind(Interner) {
+ if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) {
+ if let hir_ty::ConstValue::Concrete(c) = &data.value {
+ if let hir_ty::ConstScalar::Bytes(b, _) = &c.interned {
+ let value = u128::from_le_bytes(mir::pad16(b, false));
+ let value_signed =
+ i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_))));
+ if value >= 10 {
+ return Ok(format!("{} ({:#X})", value_signed, value));
+ } else {
+ return Ok(format!("{}", value_signed));
+ }
+ }
+ }
+ }
+ }
+ if let Ok(s) = mir::render_const_using_debug_impl(db, self.id, &c) {
+ return Ok(s);
+ }
+ let r = format!("{}", c.display(db));
return Ok(r);
}
}
@@ -2270,7 +2362,7 @@ impl TypeAlias {
pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
let subst = db.generic_defaults(self.id.into());
subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
- GenericArgData::Ty(x) => x.is_unknown(),
+ GenericArgData::Ty(it) => it.is_unknown(),
_ => false,
})
}
@@ -2660,8 +2752,8 @@ impl GenericDef {
let ty_params = generics.type_or_consts.iter().map(|(local_id, _)| {
let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id } };
match toc.split(db) {
- Either::Left(x) => GenericParam::ConstParam(x),
- Either::Right(x) => GenericParam::TypeParam(x),
+ Either::Left(it) => GenericParam::ConstParam(it),
+ Either::Right(it) => GenericParam::TypeParam(it),
}
});
self.lifetime_params(db)
@@ -2709,14 +2801,14 @@ pub struct LocalSource {
impl LocalSource {
pub fn as_ident_pat(&self) -> Option<&ast::IdentPat> {
match &self.source.value {
- Either::Left(x) => Some(x),
+ Either::Left(it) => Some(it),
Either::Right(_) => None,
}
}
pub fn into_ident_pat(self) -> Option<ast::IdentPat> {
match self.source.value {
- Either::Left(x) => Some(x),
+ Either::Left(it) => Some(it),
Either::Right(_) => None,
}
}
@@ -2738,7 +2830,7 @@ impl LocalSource {
}
pub fn syntax_ptr(self) -> InFile<SyntaxNodePtr> {
- self.source.map(|x| SyntaxNodePtr::new(x.syntax()))
+ self.source.map(|it| SyntaxNodePtr::new(it.syntax()))
}
}
@@ -2797,13 +2889,13 @@ impl Local {
Type::new(db, def, ty)
}
- /// All definitions for this local. Example: `let (a$0, _) | (_, a$0) = x;`
+ /// All definitions for this local. Example: `let (a$0, _) | (_, a$0) = it;`
pub fn sources(self, db: &dyn HirDatabase) -> Vec<LocalSource> {
let (body, source_map) = db.body_with_source_map(self.parent);
self.sources_(db, &body, &source_map).collect()
}
- /// The leftmost definition for this local. Example: `let (a$0, _) | (_, a) = x;`
+ /// The leftmost definition for this local. Example: `let (a$0, _) | (_, a) = it;`
pub fn primary_source(self, db: &dyn HirDatabase) -> LocalSource {
let (body, source_map) = db.body_with_source_map(self.parent);
let src = self.sources_(db, &body, &source_map).next().unwrap();
@@ -3057,7 +3149,9 @@ impl TypeParam {
let subst = TyBuilder::placeholder_subst(db, self.id.parent());
let ty = ty.substitute(Interner, &subst);
match ty.data(Interner) {
- GenericArgData::Ty(x) => Some(Type::new_with_resolver_inner(db, &resolver, x.clone())),
+ GenericArgData::Ty(it) => {
+ Some(Type::new_with_resolver_inner(db, &resolver, it.clone()))
+ }
_ => None,
}
}
@@ -3096,7 +3190,7 @@ impl ConstParam {
pub fn name(self, db: &dyn HirDatabase) -> Name {
let params = db.generic_params(self.id.parent());
match params.type_or_consts[self.id.local_id()].name() {
- Some(x) => x.clone(),
+ Some(it) => it.clone(),
None => {
never!();
Name::missing()
@@ -3153,8 +3247,8 @@ impl TypeOrConstParam {
pub fn ty(self, db: &dyn HirDatabase) -> Type {
match self.split(db) {
- Either::Left(x) => x.ty(db),
- Either::Right(x) => x.ty(db),
+ Either::Left(it) => it.ty(db),
+ Either::Right(it) => it.ty(db),
}
}
}
@@ -3260,9 +3354,9 @@ impl Impl {
self.id.lookup(db.upcast()).container.into()
}
- pub fn is_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
+ pub fn as_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
let src = self.source(db)?;
- src.file_id.is_builtin_derive(db.upcast())
+ src.file_id.as_builtin_derive_attr_node(db.upcast())
}
}
@@ -3652,9 +3746,9 @@ impl Type {
};
let parent_subst = TyBuilder::subst_for_def(db, trait_id, None)
.push(self.ty.clone())
- .fill(|x| {
+ .fill(|it| {
// FIXME: this code is not covered in tests.
- match x {
+ match it {
ParamKind::Type => {
GenericArgData::Ty(args.next().unwrap().ty.clone()).intern(Interner)
}
@@ -3821,7 +3915,7 @@ impl Type {
pub fn as_array(&self, db: &dyn HirDatabase) -> Option<(Type, usize)> {
if let TyKind::Array(ty, len) = &self.ty.kind(Interner) {
- try_const_usize(db, len).map(|x| (self.derived(ty.clone()), x as usize))
+ try_const_usize(db, len).map(|it| (self.derived(ty.clone()), it as usize))
} else {
None
}
@@ -4275,7 +4369,7 @@ impl Type {
}
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
- db.layout_of_ty(self.ty.clone(), self.env.krate)
+ db.layout_of_ty(self.ty.clone(), self.env.clone())
.map(|layout| Layout(layout, db.target_data_layout(self.env.krate).unwrap()))
}
}
@@ -4662,6 +4756,12 @@ pub trait HasContainer {
fn container(&self, db: &dyn HirDatabase) -> ItemContainer;
}
+impl HasContainer for ExternCrateDecl {
+ fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
+ container_id_to_hir(self.id.lookup(db.upcast()).container.into())
+ }
+}
+
impl HasContainer for Module {
fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
// FIXME: handle block expressions as modules (their parent is in a different DefMap)
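
Among the changes in this file, `render_eval` now formats integral consts itself: values of 10 and above get a hexadecimal form appended, and non-scalar values fall back to a `Debug`-based renderer. A standalone sketch of just the unsigned formatting decision (the real code also sign-extends the raw bytes and goes through `ConstScalar`):

    fn render_unsigned(value: u128) -> String {
        // Mirror the threshold used above: small values stay decimal-only,
        // larger ones also show hex.
        if value >= 10 {
            format!("{value} ({value:#X})")
        } else {
            format!("{value}")
        }
    }

    fn main() {
        assert_eq!(render_unsigned(7), "7");
        assert_eq!(render_unsigned(255), "255 (0xFF)");
    }
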
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index 5a76a9185..e99d2984c 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -15,11 +15,7 @@ use hir_def::{
type_ref::Mutability,
AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
};
-use hir_expand::{
- db::ExpandDatabase,
- name::{known, AsName},
- ExpansionInfo, MacroCallId,
-};
+use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, MacroCallId};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
@@ -439,10 +435,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.resolve_path(path)
}
- pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
- self.imp.resolve_extern_crate(extern_crate)
- }
-
pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
self.imp.resolve_variant(record_lit).map(VariantDef::from)
}
@@ -1242,18 +1234,6 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(path.syntax())?.resolve_path(self.db, path)
}
- fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
- let krate = self.scope(extern_crate.syntax())?.krate();
- let name = extern_crate.name_ref()?.as_name();
- if name == known::SELF_PARAM {
- return Some(krate);
- }
- krate
- .dependencies(self.db)
- .into_iter()
- .find_map(|dep| (dep.name == name).then_some(dep.krate))
- }
-
fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
}
@@ -1494,7 +1474,11 @@ impl<'db> SemanticsImpl<'db> {
}
fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
- let Some(enclosing_item) = expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast) else { return false };
+ let Some(enclosing_item) =
+ expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast)
+ else {
+ return false;
+ };
let def = match &enclosing_item {
Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true,
@@ -1599,6 +1583,7 @@ to_def_impls![
(crate::Local, ast::SelfParam, self_param_to_def),
(crate::Label, ast::Label, label_to_def),
(crate::Adt, ast::Adt, adt_to_def),
+ (crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
];
fn find_root(node: &SyntaxNode) -> SyntaxNode {
@@ -1631,7 +1616,7 @@ pub struct SemanticsScope<'a> {
resolver: Resolver,
}
-impl<'a> SemanticsScope<'a> {
+impl SemanticsScope<'_> {
pub fn module(&self) -> Module {
Module { id: self.resolver.module() }
}
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
index c50ffa4f8..aabda3655 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -93,9 +93,9 @@ use hir_def::{
DynMap,
},
hir::{BindingId, LabelId},
- AdtId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FieldId, FunctionId,
- GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId,
- TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, VariantId,
+ AdtId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FieldId,
+ FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId,
+ StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
};
use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId};
use rustc_hash::FxHashMap;
@@ -203,6 +203,16 @@ impl SourceToDefCtx<'_, '_> {
) -> Option<EnumVariantId> {
self.to_def(src, keys::VARIANT)
}
+ pub(super) fn extern_crate_to_def(
+ &mut self,
+ src: InFile<ast::ExternCrate>,
+ ) -> Option<ExternCrateId> {
+ self.to_def(src, keys::EXTERN_CRATE)
+ }
+ #[allow(dead_code)]
+ pub(super) fn use_to_def(&mut self, src: InFile<ast::Use>) -> Option<UseId> {
+ self.to_def(src, keys::USE)
+ }
pub(super) fn adt_to_def(
&mut self,
InFile { file_id, value }: InFile<ast::Adt>,
@@ -298,7 +308,7 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn type_param_to_def(&mut self, src: InFile<ast::TypeParam>) -> Option<TypeParamId> {
let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
let dyn_map = self.cache_for(container, src.file_id);
- dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(|x| TypeParamId::from_unchecked(x))
+ dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(|it| TypeParamId::from_unchecked(it))
}
pub(super) fn lifetime_param_to_def(
@@ -316,7 +326,10 @@ impl SourceToDefCtx<'_, '_> {
) -> Option<ConstParamId> {
let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
let dyn_map = self.cache_for(container, src.file_id);
- dyn_map[keys::CONST_PARAM].get(&src.value).copied().map(|x| ConstParamId::from_unchecked(x))
+ dyn_map[keys::CONST_PARAM]
+ .get(&src.value)
+ .copied()
+ .map(|it| ConstParamId::from_unchecked(it))
}
pub(super) fn generic_param_to_def(
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
index ecb1b306a..3499daf11 100644
--- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -832,7 +832,7 @@ impl SourceAnalyzer {
None => return func,
};
let env = db.trait_environment_for_body(owner);
- method_resolution::lookup_impl_method(db, env, func, substs).0
+ db.lookup_impl_method(env, func, substs).0
}
fn resolve_impl_const_or_trait_def(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index d07c63726..6aca716bb 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -3,7 +3,10 @@ use syntax::ast::{self, make, AstNode};
use crate::{
assist_context::{AssistContext, Assists},
- utils::{add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, DefaultMethods},
+ utils::{
+ add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, DefaultMethods,
+ IgnoreAssocItems,
+ },
AssistId, AssistKind,
};
@@ -43,6 +46,7 @@ pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext<'_
acc,
ctx,
DefaultMethods::No,
+ IgnoreAssocItems::DocHiddenAttrPresent,
"add_impl_missing_members",
"Implement missing members",
)
@@ -87,6 +91,7 @@ pub(crate) fn add_missing_default_members(
acc,
ctx,
DefaultMethods::Only,
+ IgnoreAssocItems::DocHiddenAttrPresent,
"add_impl_default_members",
"Implement default members",
)
@@ -96,6 +101,7 @@ fn add_missing_impl_members_inner(
acc: &mut Assists,
ctx: &AssistContext<'_>,
mode: DefaultMethods,
+ ignore_items: IgnoreAssocItems,
assist_id: &'static str,
label: &'static str,
) -> Option<()> {
@@ -115,10 +121,21 @@ fn add_missing_impl_members_inner(
let trait_ref = impl_.trait_ref(ctx.db())?;
let trait_ = trait_ref.trait_();
+ let mut ign_item = ignore_items;
+
+ if let IgnoreAssocItems::DocHiddenAttrPresent = ignore_items {
+ // Relax condition for local crates.
+ let db = ctx.db();
+ if trait_.module(db).krate().origin(db).is_local() {
+ ign_item = IgnoreAssocItems::No;
+ }
+ }
+
let missing_items = filter_assoc_items(
&ctx.sema,
&ide_db::traits::get_missing_assoc_items(&ctx.sema, &impl_def),
mode,
+ ign_item,
);
if missing_items.is_empty() {
@@ -1966,4 +1983,169 @@ impl AnotherTrait<i32> for () {
"#,
);
}
+
+ #[test]
+ fn doc_hidden_default_impls_ignored() {
+ // doc(hidden) attr is ignored when trait and impl both belong to the local crate.
+ check_assist(
+ add_missing_default_members,
+ r#"
+struct Foo;
+trait Trait {
+ #[doc(hidden)]
+ fn func_with_default_impl() -> u32 {
+ 42
+ }
+ fn another_default_impl() -> u32 {
+ 43
+ }
+}
+impl Tra$0it for Foo {}"#,
+ r#"
+struct Foo;
+trait Trait {
+ #[doc(hidden)]
+ fn func_with_default_impl() -> u32 {
+ 42
+ }
+ fn another_default_impl() -> u32 {
+ 43
+ }
+}
+impl Trait for Foo {
+ $0fn func_with_default_impl() -> u32 {
+ 42
+ }
+
+ fn another_default_impl() -> u32 {
+ 43
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn doc_hidden_default_impls_lang_crates() {
+        // Not applicable because Eq has a single method, which has a #[doc(hidden)] attr set.
+ check_assist_not_applicable(
+ add_missing_default_members,
+ r#"
+//- minicore: eq
+use core::cmp::Eq;
+struct Foo;
+impl E$0q for Foo { /* $0 */ }
+"#,
+ )
+ }
+
+ #[test]
+ fn doc_hidden_default_impls_lib_crates() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+ //- /main.rs crate:a deps:b
+ struct B;
+ impl b::Exte$0rnTrait for B {}
+ //- /lib.rs crate:b new_source_root:library
+ pub trait ExternTrait {
+ #[doc(hidden)]
+ fn hidden_default() -> Option<()> {
+ todo!()
+ }
+
+ fn unhidden_default() -> Option<()> {
+ todo!()
+ }
+
+ fn unhidden_nondefault() -> Option<()>;
+ }
+ "#,
+ r#"
+ struct B;
+ impl b::ExternTrait for B {
+ $0fn unhidden_default() -> Option<()> {
+ todo!()
+ }
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn doc_hidden_default_impls_local_crates() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+trait LocalTrait {
+ #[doc(hidden)]
+ fn no_skip_default() -> Option<()> {
+ todo!()
+ }
+ fn no_skip_default_2() -> Option<()> {
+ todo!()
+ }
+}
+
+struct B;
+impl Loc$0alTrait for B {}
+ "#,
+ r#"
+trait LocalTrait {
+ #[doc(hidden)]
+ fn no_skip_default() -> Option<()> {
+ todo!()
+ }
+ fn no_skip_default_2() -> Option<()> {
+ todo!()
+ }
+}
+
+struct B;
+impl LocalTrait for B {
+ $0fn no_skip_default() -> Option<()> {
+ todo!()
+ }
+
+ fn no_skip_default_2() -> Option<()> {
+ todo!()
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn doc_hidden_default_impls_workspace_crates() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+//- /lib.rs crate:b new_source_root:local
+trait LocalTrait {
+ #[doc(hidden)]
+ fn no_skip_default() -> Option<()> {
+ todo!()
+ }
+ fn no_skip_default_2() -> Option<()> {
+ todo!()
+ }
+}
+
+//- /main.rs crate:a deps:b
+struct B;
+impl b::Loc$0alTrait for B {}
+ "#,
+ r#"
+struct B;
+impl b::LocalTrait for B {
+ $0fn no_skip_default() -> Option<()> {
+ todo!()
+ }
+
+ fn no_skip_default_2() -> Option<()> {
+ todo!()
+ }
+}
+ "#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 7384390f2..3b162d7c4 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -8,10 +8,7 @@ use itertools::Itertools;
use syntax::ast::edit_in_place::Removable;
use syntax::ast::{self, make, AstNode, HasName, MatchArmList, MatchExpr, Pat};
-use crate::{
- utils::{self, render_snippet, Cursor},
- AssistContext, AssistId, AssistKind, Assists,
-};
+use crate::{utils, AssistContext, AssistId, AssistKind, Assists};
// Assist: add_missing_match_arms
//
@@ -40,9 +37,9 @@ use crate::{
pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let match_expr = ctx.find_node_at_offset_with_descend::<ast::MatchExpr>()?;
let match_arm_list = match_expr.match_arm_list()?;
- let target_range = ctx.sema.original_range(match_expr.syntax()).range;
+ let arm_list_range = ctx.sema.original_range_opt(match_arm_list.syntax())?;
- if let None = cursor_at_trivial_match_arm_list(ctx, &match_expr, &match_arm_list) {
+ if cursor_at_trivial_match_arm_list(ctx, &match_expr, &match_arm_list).is_none() {
let arm_list_range = ctx.sema.original_range(match_arm_list.syntax()).range;
let cursor_in_range = arm_list_range.contains_range(ctx.selection_trimmed());
if cursor_in_range {
@@ -75,14 +72,18 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.collect();
let module = ctx.sema.scope(expr.syntax())?.module();
- let (mut missing_pats, is_non_exhaustive): (
+ let (mut missing_pats, is_non_exhaustive, has_hidden_variants): (
Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>,
bool,
+ bool,
) = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) {
let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate());
let variants = enum_def.variants(ctx.db());
+ let has_hidden_variants =
+ variants.iter().any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
+
let missing_pats = variants
.into_iter()
.filter_map(|variant| {
@@ -101,7 +102,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
} else {
Box::new(missing_pats)
};
- (missing_pats.peekable(), is_non_exhaustive)
+ (missing_pats.peekable(), is_non_exhaustive, has_hidden_variants)
} else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr) {
let is_non_exhaustive =
enum_defs.iter().any(|enum_def| enum_def.is_non_exhaustive(ctx.db(), module.krate()));
@@ -124,6 +125,12 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
if n_arms > 256 {
return None;
}
+
+ let has_hidden_variants = variants_of_enums
+ .iter()
+ .flatten()
+ .any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
+
let missing_pats = variants_of_enums
.into_iter()
.multi_cartesian_product()
@@ -139,7 +146,11 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
(ast::Pat::from(make::tuple_pat(patterns)), is_hidden)
})
.filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
- ((Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(), is_non_exhaustive)
+ (
+ (Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(),
+ is_non_exhaustive,
+ has_hidden_variants,
+ )
} else if let Some((enum_def, len)) = resolve_array_of_enum_def(&ctx.sema, &expr) {
let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate());
let variants = enum_def.variants(ctx.db());
@@ -148,6 +159,9 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
return None;
}
+ let has_hidden_variants =
+ variants.iter().any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
+
let variants_of_enums = vec![variants; len];
let missing_pats = variants_of_enums
@@ -164,28 +178,42 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
(ast::Pat::from(make::slice_pat(patterns)), is_hidden)
})
.filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
- ((Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(), is_non_exhaustive)
+ (
+ (Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(),
+ is_non_exhaustive,
+ has_hidden_variants,
+ )
} else {
return None;
};
let mut needs_catch_all_arm = is_non_exhaustive && !has_catch_all_arm;
- if !needs_catch_all_arm && missing_pats.peek().is_none() {
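+    // Nothing to do if no catch-all arm is needed and either an existing
+    // catch-all arm already covers the hidden variants or no patterns are missing.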
+ if !needs_catch_all_arm
+ && ((has_hidden_variants && has_catch_all_arm) || missing_pats.peek().is_none())
+ {
return None;
}
acc.add(
AssistId("add_missing_match_arms", AssistKind::QuickFix),
"Fill match arms",
- target_range,
- |builder| {
+ ctx.sema.original_range(match_expr.syntax()).range,
+ |edit| {
let new_match_arm_list = match_arm_list.clone_for_update();
+
+ // having any hidden variants means that we need a catch-all arm
+ needs_catch_all_arm |= has_hidden_variants;
+
let missing_arms = missing_pats
- .map(|(pat, hidden)| {
- (make::match_arm(iter::once(pat), None, make::ext::expr_todo()), hidden)
+ .filter(|(_, hidden)| {
+ // filter out hidden patterns because they're handled by the catch-all arm
+ !hidden
})
- .map(|(it, hidden)| (it.clone_for_update(), hidden));
+ .map(|(pat, _)| {
+ make::match_arm(iter::once(pat), None, make::ext::expr_todo())
+ .clone_for_update()
+ });
let catch_all_arm = new_match_arm_list
.arms()
@@ -204,15 +232,13 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
cov_mark::hit!(add_missing_match_arms_empty_expr);
}
}
+
let mut first_new_arm = None;
- for (arm, hidden) in missing_arms {
- if hidden {
- needs_catch_all_arm = !has_catch_all_arm;
- } else {
- first_new_arm.get_or_insert_with(|| arm.clone());
- new_match_arm_list.add_arm(arm);
- }
+ for arm in missing_arms {
+ first_new_arm.get_or_insert_with(|| arm.clone());
+ new_match_arm_list.add_arm(arm);
}
+
if needs_catch_all_arm && !has_catch_all_arm {
cov_mark::hit!(added_wildcard_pattern);
let arm = make::match_arm(
@@ -225,24 +251,38 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
new_match_arm_list.add_arm(arm);
}
- let old_range = ctx.sema.original_range(match_arm_list.syntax()).range;
- match (first_new_arm, ctx.config.snippet_cap) {
- (Some(first_new_arm), Some(cap)) => {
- let extend_lifetime;
- let cursor =
- match first_new_arm.syntax().descendants().find_map(ast::WildcardPat::cast)
- {
- Some(it) => {
- extend_lifetime = it.syntax().clone();
- Cursor::Replace(&extend_lifetime)
- }
- None => Cursor::Before(first_new_arm.syntax()),
- };
- let snippet = render_snippet(cap, new_match_arm_list.syntax(), cursor);
- builder.replace_snippet(cap, old_range, snippet);
+ if let (Some(first_new_arm), Some(cap)) = (first_new_arm, ctx.config.snippet_cap) {
+ match first_new_arm.syntax().descendants().find_map(ast::WildcardPat::cast) {
+ Some(it) => edit.add_placeholder_snippet(cap, it),
+ None => edit.add_tabstop_before(cap, first_new_arm),
}
- _ => builder.replace(old_range, new_match_arm_list.to_string()),
}
+
+ // FIXME: Hack for mutable syntax trees not having great support for macros
+ // Just replace the element that the original range came from
+ let old_place = {
+ // Find the original element
+ let file = ctx.sema.parse(arm_list_range.file_id);
+ let old_place = file.syntax().covering_element(arm_list_range.range);
+
+ // Make `old_place` mut
+ match old_place {
+ syntax::SyntaxElement::Node(it) => {
+ syntax::SyntaxElement::from(edit.make_syntax_mut(it))
+ }
+ syntax::SyntaxElement::Token(it) => {
+ // Don't have a way to make tokens mut, so instead make the parent mut
+ // and find the token again
+ let parent = edit.make_syntax_mut(it.parent().unwrap());
+ let mut_token =
+ parent.covering_element(it.text_range()).into_token().unwrap();
+
+ syntax::SyntaxElement::from(mut_token)
+ }
+ }
+ };
+
+ syntax::ted::replace(old_place, new_match_arm_list.syntax());
},
)
}
@@ -1621,10 +1661,9 @@ pub enum E { #[doc(hidden)] A, }
);
}
- // FIXME: I don't think the assist should be applicable in this case
#[test]
fn does_not_fill_wildcard_with_wildcard() {
- check_assist(
+ check_assist_not_applicable(
add_missing_match_arms,
r#"
//- /main.rs crate:main deps:e
@@ -1636,13 +1675,6 @@ fn foo(t: ::e::E) {
//- /e.rs crate:e
pub enum E { #[doc(hidden)] A, }
"#,
- r#"
-fn foo(t: ::e::E) {
- match t {
- _ => todo!(),
- }
-}
-"#,
);
}
@@ -1777,7 +1809,7 @@ fn foo(t: ::e::E, b: bool) {
#[test]
fn does_not_fill_wildcard_with_partial_wildcard_and_wildcard() {
- check_assist(
+ check_assist_not_applicable(
add_missing_match_arms,
r#"
//- /main.rs crate:main deps:e
@@ -1789,14 +1821,6 @@ fn foo(t: ::e::E, b: bool) {
}
//- /e.rs crate:e
pub enum E { #[doc(hidden)] A, }"#,
- r#"
-fn foo(t: ::e::E, b: bool) {
- match t {
- _ if b => todo!(),
- _ => todo!(),
- }
-}
-"#,
);
}
@@ -1897,4 +1921,24 @@ fn foo(t: E) {
}"#,
);
}
+
+ #[test]
+ fn not_applicable_when_match_arm_list_cannot_be_upmapped() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+macro_rules! foo {
+ ($($t:tt)*) => {
+ $($t)* {}
+ }
+}
+
+enum E { A }
+
+fn main() {
+ foo!(match E::A$0);
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
index acf82e4b2..36f68d176 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
@@ -42,7 +42,9 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let name_ref = ast::NameRef::cast(ident.parent()?)?;
let def = match NameRefClass::classify(&ctx.sema, &name_ref)? {
NameRefClass::Definition(def) => def,
- NameRefClass::FieldShorthand { .. } => return None,
+ NameRefClass::FieldShorthand { .. } | NameRefClass::ExternCrateShorthand { .. } => {
+ return None
+ }
};
let fun = match def {
Definition::Function(it) => it,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
index 2b1d8f6f0..e6179ab8b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
@@ -2,9 +2,10 @@ use syntax::{
ast::{self, HasName, HasVisibility},
AstNode,
SyntaxKind::{
- CONST, ENUM, FN, MACRO_DEF, MODULE, STATIC, STRUCT, TRAIT, TYPE_ALIAS, USE, VISIBILITY,
+ self, ASSOC_ITEM_LIST, CONST, ENUM, FN, MACRO_DEF, MODULE, SOURCE_FILE, STATIC, STRUCT,
+ TRAIT, TYPE_ALIAS, USE, VISIBILITY,
},
- T,
+ SyntaxNode, T,
};
use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists};
@@ -46,13 +47,11 @@ fn add_vis(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let (offset, target) = if let Some(keyword) = item_keyword {
let parent = keyword.parent()?;
- let def_kws =
- vec![CONST, STATIC, TYPE_ALIAS, FN, MODULE, STRUCT, ENUM, TRAIT, USE, MACRO_DEF];
- // Parent is not a definition, can't add visibility
- if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) {
+
+ if !can_add(&parent) {
return None;
}
- // Already have visibility, do nothing
+ // Already has visibility, do nothing
if parent.children().any(|child| child.kind() == VISIBILITY) {
return None;
}
@@ -86,6 +85,29 @@ fn add_vis(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
)
}
+fn can_add(node: &SyntaxNode) -> bool {
+ const LEGAL: &[SyntaxKind] =
+ &[CONST, STATIC, TYPE_ALIAS, FN, MODULE, STRUCT, ENUM, TRAIT, USE, MACRO_DEF];
+
+ LEGAL.contains(&node.kind()) && {
+ let Some(p) = node.parent() else {
+ return false;
+ };
+
+ if p.kind() == ASSOC_ITEM_LIST {
+ p.parent()
+ .and_then(|it| ast::Impl::cast(it))
+                // Inherent impls, i.e. 'non-trait impls', have a non-local
+                // effect, so their items can have visibility even when
+                // nested; trait impls are filtered out.
+ .filter(|imp| imp.for_token().is_none())
+ .is_some()
+ } else {
+ matches!(p.kind(), SOURCE_FILE | MODULE)
+ }
+ }
+}
+
fn change_vis(acc: &mut Assists, vis: ast::Visibility) -> Option<()> {
if vis.syntax().text() == "pub" {
let target = vis.syntax().text_range();
@@ -129,6 +151,16 @@ mod tests {
check_assist(change_visibility, "unsafe f$0n foo() {}", "pub(crate) unsafe fn foo() {}");
check_assist(change_visibility, "$0macro foo() {}", "pub(crate) macro foo() {}");
check_assist(change_visibility, "$0use foo;", "pub(crate) use foo;");
+ check_assist(
+ change_visibility,
+ "impl Foo { f$0n foo() {} }",
+ "impl Foo { pub(crate) fn foo() {} }",
+ );
+ check_assist(
+ change_visibility,
+ "fn bar() { impl Foo { f$0n foo() {} } }",
+ "fn bar() { impl Foo { pub(crate) fn foo() {} } }",
+ );
}
#[test]
@@ -213,4 +245,33 @@ mod tests {
check_assist_target(change_visibility, "pub(crate)$0 fn foo() {}", "pub(crate)");
check_assist_target(change_visibility, "struct S { $0field: u32 }", "field");
}
+
+ #[test]
+ fn not_applicable_for_items_within_traits() {
+ check_assist_not_applicable(change_visibility, "trait Foo { f$0n run() {} }");
+ check_assist_not_applicable(change_visibility, "trait Foo { con$0st FOO: u8 = 69; }");
+ check_assist_not_applicable(change_visibility, "impl Foo for Bar { f$0n quox() {} }");
+ }
+
+ #[test]
+ fn not_applicable_for_items_within_fns() {
+ check_assist_not_applicable(change_visibility, "fn foo() { f$0n inner() {} }");
+ check_assist_not_applicable(change_visibility, "fn foo() { unsafe f$0n inner() {} }");
+ check_assist_not_applicable(change_visibility, "fn foo() { const f$0n inner() {} }");
+ check_assist_not_applicable(change_visibility, "fn foo() { con$0st FOO: u8 = 69; }");
+ check_assist_not_applicable(change_visibility, "fn foo() { en$0um Foo {} }");
+ check_assist_not_applicable(change_visibility, "fn foo() { stru$0ct Foo {} }");
+ check_assist_not_applicable(change_visibility, "fn foo() { mo$0d foo {} }");
+ check_assist_not_applicable(change_visibility, "fn foo() { $0use foo; }");
+ check_assist_not_applicable(change_visibility, "fn foo() { $0type Foo = Bar<T>; }");
+ check_assist_not_applicable(change_visibility, "fn foo() { tr$0ait Foo {} }");
+ check_assist_not_applicable(
+ change_visibility,
+ "fn foo() { impl Trait for Bar { f$0n bar() {} } }",
+ );
+ check_assist_not_applicable(
+ change_visibility,
+ "fn foo() { impl Trait for Bar { con$0st FOO: u8 = 69; } }",
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs
index b1b0f587c..6a5b11f54 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs
@@ -1,3 +1,6 @@
+use hir::Semantics;
+use ide_db::RootDatabase;
+use stdx::format_to;
use syntax::ast::{self, AstNode};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -24,6 +27,7 @@ pub(crate) fn convert_two_arm_bool_match_to_matches_macro(
acc: &mut Assists,
ctx: &AssistContext<'_>,
) -> Option<()> {
+ use ArmBodyExpression::*;
let match_expr = ctx.find_node_at_offset::<ast::MatchExpr>()?;
let match_arm_list = match_expr.match_arm_list()?;
let mut arms = match_arm_list.arms();
@@ -33,21 +37,20 @@ pub(crate) fn convert_two_arm_bool_match_to_matches_macro(
cov_mark::hit!(non_two_arm_match);
return None;
}
- let first_arm_expr = first_arm.expr();
- let second_arm_expr = second_arm.expr();
+ let first_arm_expr = first_arm.expr()?;
+ let second_arm_expr = second_arm.expr()?;
+ let first_arm_body = is_bool_literal_expr(&ctx.sema, &first_arm_expr)?;
+ let second_arm_body = is_bool_literal_expr(&ctx.sema, &second_arm_expr)?;
- let invert_matches = if is_bool_literal_expr(&first_arm_expr, true)
- && is_bool_literal_expr(&second_arm_expr, false)
- {
- false
- } else if is_bool_literal_expr(&first_arm_expr, false)
- && is_bool_literal_expr(&second_arm_expr, true)
- {
- true
- } else {
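+    // The assist only applies when the arm bodies are `true`/`false`, `false`/`true`,
+    // or `<bool expression>`/`false`.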
+ if !matches!(
+ (&first_arm_body, &second_arm_body),
+ (Literal(true), Literal(false))
+ | (Literal(false), Literal(true))
+ | (Expression(_), Literal(false))
+ ) {
cov_mark::hit!(non_invert_bool_literal_arms);
return None;
- };
+ }
let target_range = ctx.sema.original_range(match_expr.syntax()).range;
let expr = match_expr.expr()?;
@@ -59,28 +62,55 @@ pub(crate) fn convert_two_arm_bool_match_to_matches_macro(
|builder| {
let mut arm_str = String::new();
if let Some(pat) = &first_arm.pat() {
- arm_str += &pat.to_string();
+ format_to!(arm_str, "{pat}");
}
if let Some(guard) = &first_arm.guard() {
arm_str += &format!(" {guard}");
}
- if invert_matches {
- builder.replace(target_range, format!("!matches!({expr}, {arm_str})"));
- } else {
- builder.replace(target_range, format!("matches!({expr}, {arm_str})"));
- }
+
+ let replace_with = match (first_arm_body, second_arm_body) {
+ (Literal(true), Literal(false)) => {
+ format!("matches!({expr}, {arm_str})")
+ }
+ (Literal(false), Literal(true)) => {
+ format!("!matches!({expr}, {arm_str})")
+ }
+ (Expression(body_expr), Literal(false)) => {
+ arm_str.push_str(match &first_arm.guard() {
+ Some(_) => " && ",
+ _ => " if ",
+ });
+ format!("matches!({expr}, {arm_str}{body_expr})")
+ }
+ _ => {
+ unreachable!()
+ }
+ };
+ builder.replace(target_range, replace_with);
},
)
}
-fn is_bool_literal_expr(expr: &Option<ast::Expr>, expect_bool: bool) -> bool {
- if let Some(ast::Expr::Literal(lit)) = expr {
+enum ArmBodyExpression {
+ Literal(bool),
+ Expression(ast::Expr),
+}
+
+fn is_bool_literal_expr(
+ sema: &Semantics<'_, RootDatabase>,
+ expr: &ast::Expr,
+) -> Option<ArmBodyExpression> {
+ if let ast::Expr::Literal(lit) = expr {
if let ast::LiteralKind::Bool(b) = lit.kind() {
- return b == expect_bool;
+ return Some(ArmBodyExpression::Literal(b));
}
}
- return false;
+ if !sema.type_of_expr(expr)?.original.is_bool() {
+ return None;
+ }
+
+ Some(ArmBodyExpression::Expression(expr.clone()))
}
#[cfg(test)]
@@ -122,21 +152,6 @@ fn foo(a: Option<u32>) -> bool {
}
#[test]
- fn not_applicable_non_bool_literal_arms() {
- cov_mark::check!(non_invert_bool_literal_arms);
- check_assist_not_applicable(
- convert_two_arm_bool_match_to_matches_macro,
- r#"
-fn foo(a: Option<u32>) -> bool {
- match a$0 {
- Some(val) => val == 3,
- _ => false
- }
-}
- "#,
- );
- }
- #[test]
fn not_applicable_both_false_arms() {
cov_mark::check!(non_invert_bool_literal_arms);
check_assist_not_applicable(
@@ -291,4 +306,40 @@ fn main() {
}",
);
}
+
+ #[test]
+ fn convert_non_literal_bool() {
+ check_assist(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+fn main() {
+ match 0$0 {
+ a @ 0..15 => a == 0,
+ _ => false,
+ }
+}
+"#,
+ r#"
+fn main() {
+ matches!(0, a @ 0..15 if a == 0)
+}
+"#,
+ );
+ check_assist(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+fn main() {
+ match 0$0 {
+ a @ 0..15 if thing() => a == 0,
+ _ => false,
+ }
+}
+"#,
+ r#"
+fn main() {
+ matches!(0, a @ 0..15 if thing() && a == 0)
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
index ea71d165e..f30ca2552 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
@@ -114,7 +114,7 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleDat
let usages = ctx.sema.to_def(&ident_pat).map(|def| {
Definition::Local(def)
.usages(&ctx.sema)
- .in_scope(SearchScope::single_file(ctx.file_id()))
+ .in_scope(&SearchScope::single_file(ctx.file_id()))
.all()
});
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
index 226a5dd9f..ddc8a50ed 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
@@ -27,7 +27,9 @@ use crate::{
pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let comment = ctx.find_token_at_offset::<ast::Comment>()?;
// Only allow doc comments
- let Some(placement) = comment.kind().doc else { return None; };
+ let Some(placement) = comment.kind().doc else {
+ return None;
+ };
// Only allow comments which are alone on their line
if let Some(prev) = comment.syntax().prev_token() {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs
index 5c435dd9c..9beb616d9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs
@@ -1,5 +1,5 @@
use either::Either;
-use hir::{AssocItem, HasVisibility, Module, ModuleDef, Name, PathResolution, ScopeDef};
+use hir::{AssocItem, Enum, HasVisibility, Module, ModuleDef, Name, PathResolution, ScopeDef};
use ide_db::{
defs::{Definition, NameRefClass},
search::SearchScope,
@@ -45,7 +45,8 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let use_tree = star.parent().and_then(ast::UseTree::cast)?;
let (parent, mod_path) = find_parent_and_path(&star)?;
let target_module = match ctx.sema.resolve_path(&mod_path)? {
- PathResolution::Def(ModuleDef::Module(it)) => it,
+ PathResolution::Def(ModuleDef::Module(it)) => Expandable::Module(it),
+ PathResolution::Def(ModuleDef::Adt(hir::Adt::Enum(e))) => Expandable::Enum(e),
_ => return None,
};
@@ -90,6 +91,11 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) ->
)
}
+enum Expandable {
+ Module(Module),
+ Enum(Enum),
+}
+
fn find_parent_and_path(
star: &SyntaxToken,
) -> Option<(Either<ast::UseTree, ast::UseTreeList>, ast::Path)> {
@@ -114,7 +120,7 @@ fn find_parent_and_path(
fn def_is_referenced_in(def: Definition, ctx: &AssistContext<'_>) -> bool {
let search_scope = SearchScope::single_file(ctx.file_id());
- def.usages(&ctx.sema).in_scope(search_scope).at_least_one()
+ def.usages(&ctx.sema).in_scope(&search_scope).at_least_one()
}
#[derive(Debug, Clone)]
@@ -168,23 +174,59 @@ impl Refs {
}
}
-fn find_refs_in_mod(ctx: &AssistContext<'_>, module: Module, visible_from: Module) -> Option<Refs> {
- if !is_mod_visible_from(ctx, module, visible_from) {
+fn find_refs_in_mod(
+ ctx: &AssistContext<'_>,
+ expandable: Expandable,
+ visible_from: Module,
+) -> Option<Refs> {
+ if !is_expandable_visible_from(ctx, &expandable, visible_from) {
return None;
}
- let module_scope = module.scope(ctx.db(), Some(visible_from));
- let refs = module_scope.into_iter().filter_map(|(n, d)| Ref::from_scope_def(n, d)).collect();
- Some(Refs(refs))
+ match expandable {
+ Expandable::Module(module) => {
+ let module_scope = module.scope(ctx.db(), Some(visible_from));
+ let refs =
+ module_scope.into_iter().filter_map(|(n, d)| Ref::from_scope_def(n, d)).collect();
+ Some(Refs(refs))
+ }
+ Expandable::Enum(enm) => Some(Refs(
+ enm.variants(ctx.db())
+ .into_iter()
+ .map(|v| Ref { visible_name: v.name(ctx.db()), def: Definition::Variant(v) })
+ .collect(),
+ )),
+ }
}
-fn is_mod_visible_from(ctx: &AssistContext<'_>, module: Module, from: Module) -> bool {
- match module.parent(ctx.db()) {
- Some(parent) => {
- module.visibility(ctx.db()).is_visible_from(ctx.db(), from.into())
- && is_mod_visible_from(ctx, parent, from)
+fn is_expandable_visible_from(
+ ctx: &AssistContext<'_>,
+ expandable: &Expandable,
+ from: Module,
+) -> bool {
+ fn is_mod_visible_from(ctx: &AssistContext<'_>, module: Module, from: Module) -> bool {
+ match module.parent(ctx.db()) {
+ Some(parent) => {
+ module.visibility(ctx.db()).is_visible_from(ctx.db(), from.into())
+ && is_mod_visible_from(ctx, parent, from)
+ }
+ None => true,
+ }
+ }
+
+ match expandable {
+ Expandable::Module(module) => match module.parent(ctx.db()) {
+ Some(parent) => {
+ module.visibility(ctx.db()).is_visible_from(ctx.db(), from.into())
+ && is_mod_visible_from(ctx, parent, from)
+ }
+ None => true,
+ },
+ Expandable::Enum(enm) => {
+ let module = enm.module(ctx.db());
+ enm.visibility(ctx.db()).is_visible_from(ctx.db(), from.into())
+ && is_mod_visible_from(ctx, module, from)
}
- None => true,
}
}
@@ -897,4 +939,98 @@ struct Baz {
",
);
}
+
+ #[test]
+ fn test_support_for_enums() {
+ check_assist(
+ expand_glob_import,
+ r#"
+mod foo {
+ pub enum Foo {
+ Bar,
+ Baz,
+ }
+}
+
+use foo::Foo;
+use foo::Foo::*$0;
+
+struct Strukt {
+ bar: Foo,
+}
+
+fn main() {
+ let s: Strukt = Strukt { bar: Bar };
+}"#,
+ r#"
+mod foo {
+ pub enum Foo {
+ Bar,
+ Baz,
+ }
+}
+
+use foo::Foo;
+use foo::Foo::Bar;
+
+struct Strukt {
+ bar: Foo,
+}
+
+fn main() {
+ let s: Strukt = Strukt { bar: Bar };
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_expanding_multiple_variants_at_once() {
+ check_assist(
+ expand_glob_import,
+ r#"
+mod foo {
+ pub enum Foo {
+ Bar,
+ Baz,
+ }
+}
+
+mod abc {
+ use super::foo;
+ use super::foo::Foo::*$0;
+
+ struct Strukt {
+ baz: foo::Foo,
+ bar: foo::Foo,
+ }
+
+ fn trying_calling() {
+ let s: Strukt = Strukt { bar: Bar , baz : Baz };
+ }
+
+}"#,
+ r#"
+mod foo {
+ pub enum Foo {
+ Bar,
+ Baz,
+ }
+}
+
+mod abc {
+ use super::foo;
+ use super::foo::Foo::{Bar, Baz};
+
+ struct Strukt {
+ baz: foo::Foo,
+ bar: foo::Foo,
+ }
+
+ fn trying_calling() {
+ let s: Strukt = Strukt { bar: Bar , baz : Baz };
+ }
+
+}"#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
index 2a67909e6..b8b781ea4 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
@@ -384,7 +384,7 @@ impl LocalUsages {
Self(
Definition::Local(var)
.usages(&ctx.sema)
- .in_scope(SearchScope::single_file(ctx.file_id()))
+ .in_scope(&SearchScope::single_file(ctx.file_id()))
.all(),
)
}
@@ -1360,14 +1360,15 @@ fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> St
}
format_to!(buf, "{expr}");
- let insert_comma = fun
- .body
- .parent()
- .and_then(ast::MatchArm::cast)
- .map_or(false, |it| it.comma_token().is_none());
+ let parent_match_arm = fun.body.parent().and_then(ast::MatchArm::cast);
+ let insert_comma = parent_match_arm.as_ref().is_some_and(|it| it.comma_token().is_none());
+
if insert_comma {
buf.push(',');
- } else if fun.ret_ty.is_unit() && (!fun.outliving_locals.is_empty() || !expr.is_block_like()) {
+ } else if parent_match_arm.is_none()
+ && fun.ret_ty.is_unit()
+ && (!fun.outliving_locals.is_empty() || !expr.is_block_like())
+ {
buf.push(';');
}
buf
@@ -4611,6 +4612,29 @@ fn $0fun_name() -> i32 {
}
"#,
);
+
+ // Makes sure no semicolon is added for unit-valued match arms
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ match () {
+ _ => $0()$0,
+ }
+}
+"#,
+ r#"
+fn main() {
+ match () {
+ _ => fun_name(),
+ }
+}
+
+fn $0fun_name() {
+ ()
+}
+"#,
+ )
}
#[test]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
index de37f5f13..6839c5820 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
@@ -478,7 +478,7 @@ impl Module {
let selection_range = ctx.selection_trimmed();
let curr_file_id = ctx.file_id();
let search_scope = SearchScope::single_file(curr_file_id);
- let usage_res = def.usages(&ctx.sema).in_scope(search_scope).all();
+ let usage_res = def.usages(&ctx.sema).in_scope(&search_scope).all();
let file = ctx.sema.parse(curr_file_id);
let mut exists_inside_sel = false;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
index d6c59a9c8..c9f272474 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
@@ -1,11 +1,11 @@
use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef};
use ide_db::base_db::FileId;
use syntax::{
- ast::{self, HasVisibility as _},
- AstNode, TextRange, TextSize,
+ ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _},
+ AstNode, TextRange,
};
-use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, AssistKind, Assists};
// FIXME: this really should be a fix for diagnostic, rather than an assist.
@@ -40,12 +40,16 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
let qualifier = path.qualifier()?;
let name_ref = path.segment()?.name_ref()?;
let qualifier_res = ctx.sema.resolve_path(&qualifier)?;
- let PathResolution::Def(ModuleDef::Module(module)) = qualifier_res else { return None; };
+ let PathResolution::Def(ModuleDef::Module(module)) = qualifier_res else {
+ return None;
+ };
let (_, def) = module
.scope(ctx.db(), None)
.into_iter()
.find(|(name, _)| name.to_smol_str() == name_ref.text().as_str())?;
- let ScopeDef::ModuleDef(def) = def else { return None; };
+ let ScopeDef::ModuleDef(def) = def else {
+ return None;
+ };
let current_module = ctx.sema.scope(path.syntax())?.module();
let target_module = def.module(ctx.db())?;
@@ -54,11 +58,13 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
return None;
};
- let (offset, current_visibility, target, target_file, target_name) =
- target_data_for_def(ctx.db(), def)?;
+ let (vis_owner, target, target_file, target_name) = target_data_for_def(ctx.db(), def)?;
- let missing_visibility =
- if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
+ let missing_visibility = if current_module.krate() == target_module.krate() {
+ make::visibility_pub_crate()
+ } else {
+ make::visibility_pub()
+ };
let assist_label = match target_name {
None => format!("Change visibility to {missing_visibility}"),
@@ -67,23 +73,14 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
}
};
- acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
- builder.edit_file(target_file);
- match ctx.config.snippet_cap {
- Some(cap) => match current_visibility {
- Some(current_visibility) => builder.replace_snippet(
- cap,
- current_visibility.syntax().text_range(),
- format!("$0{missing_visibility}"),
- ),
- None => builder.insert_snippet(cap, offset, format!("$0{missing_visibility} ")),
- },
- None => match current_visibility {
- Some(current_visibility) => {
- builder.replace(current_visibility.syntax().text_range(), missing_visibility)
- }
- None => builder.insert(offset, format!("{missing_visibility} ")),
- },
+ acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| {
+ edit.edit_file(target_file);
+
+ let vis_owner = edit.make_mut(vis_owner);
+ vis_owner.set_visibility(missing_visibility.clone_for_update());
+
+ if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) {
+ edit.add_tabstop_before(cap, vis);
}
})
}
@@ -103,19 +100,22 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
let target_module = parent.module(ctx.db());
let in_file_source = record_field_def.source(ctx.db())?;
- let (offset, current_visibility, target) = match in_file_source.value {
+ let (vis_owner, target) = match in_file_source.value {
hir::FieldSource::Named(it) => {
- let s = it.syntax();
- (vis_offset(s), it.visibility(), s.text_range())
+ let range = it.syntax().text_range();
+ (ast::AnyHasVisibility::new(it), range)
}
hir::FieldSource::Pos(it) => {
- let s = it.syntax();
- (vis_offset(s), it.visibility(), s.text_range())
+ let range = it.syntax().text_range();
+ (ast::AnyHasVisibility::new(it), range)
}
};
- let missing_visibility =
- if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
+ let missing_visibility = if current_module.krate() == target_module.krate() {
+ make::visibility_pub_crate()
+ } else {
+ make::visibility_pub()
+ };
let target_file = in_file_source.file_id.original_file(ctx.db());
let target_name = record_field_def.name(ctx.db());
@@ -125,23 +125,14 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
target_name.display(ctx.db())
);
- acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
- builder.edit_file(target_file);
- match ctx.config.snippet_cap {
- Some(cap) => match current_visibility {
- Some(current_visibility) => builder.replace_snippet(
- cap,
- current_visibility.syntax().text_range(),
- format!("$0{missing_visibility}"),
- ),
- None => builder.insert_snippet(cap, offset, format!("$0{missing_visibility} ")),
- },
- None => match current_visibility {
- Some(current_visibility) => {
- builder.replace(current_visibility.syntax().text_range(), missing_visibility)
- }
- None => builder.insert(offset, format!("{missing_visibility} ")),
- },
+ acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| {
+ edit.edit_file(target_file);
+
+ let vis_owner = edit.make_mut(vis_owner);
+ vis_owner.set_visibility(missing_visibility.clone_for_update());
+
+ if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) {
+ edit.add_tabstop_before(cap, vis);
}
})
}
@@ -149,11 +140,11 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
fn target_data_for_def(
db: &dyn HirDatabase,
def: hir::ModuleDef,
-) -> Option<(TextSize, Option<ast::Visibility>, TextRange, FileId, Option<hir::Name>)> {
+) -> Option<(ast::AnyHasVisibility, TextRange, FileId, Option<hir::Name>)> {
fn offset_target_and_file_id<S, Ast>(
db: &dyn HirDatabase,
x: S,
- ) -> Option<(TextSize, Option<ast::Visibility>, TextRange, FileId)>
+ ) -> Option<(ast::AnyHasVisibility, TextRange, FileId)>
where
S: HasSource<Ast = Ast>,
Ast: AstNode + ast::HasVisibility,
@@ -161,18 +152,12 @@ fn target_data_for_def(
let source = x.source(db)?;
let in_file_syntax = source.syntax();
let file_id = in_file_syntax.file_id;
- let syntax = in_file_syntax.value;
- let current_visibility = source.value.visibility();
- Some((
- vis_offset(syntax),
- current_visibility,
- syntax.text_range(),
- file_id.original_file(db.upcast()),
- ))
+ let range = in_file_syntax.value.text_range();
+ Some((ast::AnyHasVisibility::new(source.value), range, file_id.original_file(db.upcast())))
}
let target_name;
- let (offset, current_visibility, target, target_file) = match def {
+    let (vis_owner, target, target_file) = match def {
hir::ModuleDef::Function(f) => {
target_name = Some(f.name(db));
offset_target_and_file_id(db, f)?
@@ -209,8 +194,8 @@ fn target_data_for_def(
target_name = m.name(db);
let in_file_source = m.declaration_source(db)?;
let file_id = in_file_source.file_id.original_file(db.upcast());
- let syntax = in_file_source.value.syntax();
- (vis_offset(syntax), in_file_source.value.visibility(), syntax.text_range(), file_id)
+ let range = in_file_source.value.syntax().text_range();
+ (ast::AnyHasVisibility::new(in_file_source.value), range, file_id)
}
// FIXME
hir::ModuleDef::Macro(_) => return None,
@@ -218,7 +203,7 @@ fn target_data_for_def(
hir::ModuleDef::Variant(_) | hir::ModuleDef::BuiltinType(_) => return None,
};
- Some((offset, current_visibility, target, target_file, target_name))
+    Some((vis_owner, target, target_file, target_name))
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
index 860372941..7e4f140a2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
@@ -15,6 +15,7 @@ use crate::{
// Generates default implementation from new method.
//
// ```
+// # //- minicore: default
// struct Example { _inner: () }
//
// impl Example {
@@ -54,6 +55,7 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
}
let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?;
+ let self_ty = impl_.self_ty()?;
if is_default_implemented(ctx, &impl_) {
cov_mark::hit!(default_block_is_already_present);
cov_mark::hit!(struct_in_module_with_default);
@@ -70,15 +72,19 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
let default_code = " fn default() -> Self {
Self::new()
}";
- let code = generate_trait_impl_text_from_impl(&impl_, "Default", default_code);
+ let code = generate_trait_impl_text_from_impl(&impl_, self_ty, "Default", default_code);
builder.insert(insert_location.end(), code);
},
)
}
// FIXME: based on from utils::generate_impl_text_inner
-fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code: &str) -> String {
- let impl_ty = impl_.self_ty().unwrap();
+fn generate_trait_impl_text_from_impl(
+ impl_: &ast::Impl,
+ self_ty: ast::Type,
+ trait_text: &str,
+ code: &str,
+) -> String {
let generic_params = impl_.generic_param_list().map(|generic_params| {
let lifetime_params =
generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
@@ -109,7 +115,7 @@ fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code:
if let Some(generic_params) = &generic_params {
format_to!(buf, "{generic_params}")
}
- format_to!(buf, " {trait_text} for {impl_ty}");
+ format_to!(buf, " {trait_text} for {self_ty}");
match impl_.where_clause() {
Some(where_clause) => {
@@ -136,7 +142,9 @@ fn is_default_implemented(ctx: &AssistContext<'_>, impl_: &Impl) -> bool {
let default = FamousDefs(&ctx.sema, krate).core_default_Default();
let default_trait = match default {
Some(value) => value,
- None => return false,
+ // Return `true` to avoid providing the assist because it makes no sense
+ // to impl `Default` when it's missing.
+ None => return true,
};
ty.impls_trait(db, default_trait, &[])
@@ -480,6 +488,7 @@ impl Example {
check_assist_not_applicable(
generate_default_from_new,
r#"
+//- minicore: default
struct Example { _inner: () }
impl Example {
@@ -655,4 +664,23 @@ mod test {
"#,
);
}
+
+ #[test]
+ fn not_applicable_when_default_lang_item_is_missing() {
+ check_assist_not_applicable(
+ generate_default_from_new,
+ r#"
+struct S;
+impl S {
+ fn new$0() -> Self {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_missing_self_ty() {
+ // Regression test for #15398.
+ check_assist_not_applicable(generate_default_from_new, "impl { fn new$0() -> Self {} }");
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index b68c766e6..31fc69562 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -1,13 +1,17 @@
use std::collections::HashSet;
use hir::{self, HasCrate, HasSource, HasVisibility};
-use syntax::ast::{self, make, AstNode, HasGenericParams, HasName, HasVisibility as _};
+use syntax::{
+ ast::{
+ self, edit_in_place::Indent, make, AstNode, HasGenericParams, HasName, HasVisibility as _,
+ },
+ ted,
+};
use crate::{
- utils::{convert_param_list_to_arg_list, find_struct_impl, render_snippet, Cursor},
+ utils::{convert_param_list_to_arg_list, find_struct_impl},
AssistContext, AssistId, AssistKind, Assists, GroupLabel,
};
-use syntax::ast::edit::AstNodeEdit;
// Assist: generate_delegate_methods
//
@@ -88,13 +92,15 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
let adt = ast::Adt::Struct(strukt.clone());
let name = name.display(ctx.db()).to_string();
// if `find_struct_impl` returns None, that means that a function named `name` already exists.
- let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else { continue; };
+ let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else {
+ continue;
+ };
acc.add_group(
&GroupLabel("Generate delegate methods…".to_owned()),
AssistId("generate_delegate_methods", AssistKind::Generate),
format!("Generate delegate for `{field_name}.{name}()`",),
target,
- |builder| {
+ |edit| {
// Create the function
let method_source = match method.source(ctx.db()) {
Some(source) => source.value,
@@ -133,36 +139,12 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
is_const,
is_unsafe,
)
- .indent(ast::edit::IndentLevel(1))
.clone_for_update();
- let cursor = Cursor::Before(f.syntax());
-
- // Create or update an impl block, attach the function to it,
- // then insert into our code.
- match impl_def {
- Some(impl_def) => {
- // Remember where in our source our `impl` block lives.
- let impl_def = impl_def.clone_for_update();
- let old_range = impl_def.syntax().text_range();
-
- // Attach the function to the impl block
- let assoc_items = impl_def.get_or_create_assoc_item_list();
- assoc_items.add_item(f.clone().into());
-
- // Update the impl block.
- match ctx.config.snippet_cap {
- Some(cap) => {
- let snippet = render_snippet(cap, impl_def.syntax(), cursor);
- builder.replace_snippet(cap, old_range, snippet);
- }
- None => {
- builder.replace(old_range, impl_def.syntax().to_string());
- }
- }
- }
+ // Get the impl to update, or create one if we need to.
+ let impl_def = match impl_def {
+ Some(impl_def) => edit.make_mut(impl_def),
None => {
- // Attach the function to the impl block
let name = &strukt_name.to_string();
let params = strukt.generic_param_list();
let ty_params = params.clone();
@@ -176,24 +158,34 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
None,
)
.clone_for_update();
- let assoc_items = impl_def.get_or_create_assoc_item_list();
- assoc_items.add_item(f.clone().into());
+
+ // Fixup impl_def indentation
+ let indent = strukt.indent_level();
+ impl_def.reindent_to(indent);
// Insert the impl block.
- match ctx.config.snippet_cap {
- Some(cap) => {
- let offset = strukt.syntax().text_range().end();
- let snippet = render_snippet(cap, impl_def.syntax(), cursor);
- let snippet = format!("\n\n{snippet}");
- builder.insert_snippet(cap, offset, snippet);
- }
- None => {
- let offset = strukt.syntax().text_range().end();
- let snippet = format!("\n\n{}", impl_def.syntax());
- builder.insert(offset, snippet);
- }
- }
+ let strukt = edit.make_mut(strukt.clone());
+ ted::insert_all(
+ ted::Position::after(strukt.syntax()),
+ vec![
+ make::tokens::whitespace(&format!("\n\n{indent}")).into(),
+ impl_def.syntax().clone().into(),
+ ],
+ );
+
+ impl_def
}
+ };
+
+ // Fixup function indentation.
+ // FIXME: Should really be handled by `AssocItemList::add_item`
+ f.reindent_to(impl_def.indent_level() + 1);
+
+ let assoc_items = impl_def.get_or_create_assoc_item_list();
+ assoc_items.add_item(f.clone().into());
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ edit.add_tabstop_before(cap, f)
}
},
)?;
@@ -243,6 +235,45 @@ impl Person {
}
#[test]
+ fn test_generate_delegate_create_impl_block_match_indent() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+mod indent {
+ struct Age(u8);
+ impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+ }
+
+ struct Person {
+ ag$0e: Age,
+ }
+}"#,
+ r#"
+mod indent {
+ struct Age(u8);
+ impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+ }
+
+ struct Person {
+ age: Age,
+ }
+
+ impl Person {
+ $0fn age(&self) -> u8 {
+ self.age.age()
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
fn test_generate_delegate_update_impl_block() {
check_assist(
generate_delegate_methods,
@@ -280,6 +311,47 @@ impl Person {
}
#[test]
+ fn test_generate_delegate_update_impl_block_match_indent() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+mod indent {
+ struct Age(u8);
+ impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+ }
+
+ struct Person {
+ ag$0e: Age,
+ }
+
+ impl Person {}
+}"#,
+ r#"
+mod indent {
+ struct Age(u8);
+ impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+ }
+
+ struct Person {
+ age: Age,
+ }
+
+ impl Person {
+ $0fn age(&self) -> u8 {
+ self.age.age()
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
fn test_generate_delegate_tuple_struct() {
check_assist(
generate_delegate_methods,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs
new file mode 100644
index 000000000..f4fa6a74c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs
@@ -0,0 +1,1051 @@
+use std::ops::Not;
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::convert_param_list_to_arg_list,
+};
+use either::Either;
+use hir::{db::HirDatabase, HasVisibility};
+use ide_db::{
+ assists::{AssistId, GroupLabel},
+ path_transform::PathTransform,
+};
+use syntax::{
+ ast::{
+ self,
+ edit::{self, AstNodeEdit},
+ make, AssocItem, HasGenericParams, HasName, HasVisibility as astHasVisibility, Path,
+ },
+ ted::{self, Position},
+ AstNode, NodeOrToken, SyntaxKind,
+};
+
+// Assist: generate_delegate_trait
+//
+// Generate delegate trait implementation for `StructField`s.
+//
+// ```
+// trait SomeTrait {
+// type T;
+// fn fn_(arg: u32) -> u32;
+// fn method_(&mut self) -> bool;
+// }
+// struct A;
+// impl SomeTrait for A {
+// type T = u32;
+//
+// fn fn_(arg: u32) -> u32 {
+// 42
+// }
+//
+// fn method_(&mut self) -> bool {
+// false
+// }
+// }
+// struct B {
+// a$0: A,
+// }
+// ```
+// ->
+// ```
+// trait SomeTrait {
+// type T;
+// fn fn_(arg: u32) -> u32;
+// fn method_(&mut self) -> bool;
+// }
+// struct A;
+// impl SomeTrait for A {
+// type T = u32;
+//
+// fn fn_(arg: u32) -> u32 {
+// 42
+// }
+//
+// fn method_(&mut self) -> bool {
+// false
+// }
+// }
+// struct B {
+// a: A,
+// }
+//
+// impl SomeTrait for B {
+// type T = <A as SomeTrait>::T;
+//
+// fn fn_(arg: u32) -> u32 {
+// <A as SomeTrait>::fn_(arg)
+// }
+//
+// fn method_(&mut self) -> bool {
+// <A as SomeTrait>::method_( &mut self.a )
+// }
+// }
+// ```
+pub(crate) fn generate_delegate_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let strukt = Struct::new(ctx.find_node_at_offset::<ast::Struct>()?)?;
+
+ let field: Field = match ctx.find_node_at_offset::<ast::RecordField>() {
+ Some(field) => Field::new(&ctx, Either::Left(field))?,
+ None => {
+ let field = ctx.find_node_at_offset::<ast::TupleField>()?;
+ let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?;
+ Field::new(&ctx, either::Right((field, field_list)))?
+ }
+ };
+
+ strukt.delegate(field, acc, ctx);
+ Some(())
+}
+
+/// A utility object that represents a struct's field.
+struct Field {
+ name: String,
+ ty: ast::Type,
+ range: syntax::TextRange,
+ impls: Vec<Delegee>,
+}
+
+impl Field {
+ pub(crate) fn new(
+ ctx: &AssistContext<'_>,
+ f: Either<ast::RecordField, (ast::TupleField, ast::TupleFieldList)>,
+ ) -> Option<Field> {
+ let db = ctx.sema.db;
+ let name: String;
+ let range: syntax::TextRange;
+ let ty: ast::Type;
+
+ let module = ctx.sema.to_module_def(ctx.file_id())?;
+
+ match f {
+ Either::Left(f) => {
+ name = f.name()?.to_string();
+ ty = f.ty()?;
+ range = f.syntax().text_range();
+ }
+ Either::Right((f, l)) => {
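+                // Tuple fields are referred to by their positional index.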
+ name = l.fields().position(|it| it == f)?.to_string();
+ ty = f.ty()?;
+ range = f.syntax().text_range();
+ }
+ };
+
+ let hir_ty = ctx.sema.resolve_type(&ty)?;
+ let type_impls = hir::Impl::all_for_type(db, hir_ty.clone());
+ let mut impls = Vec::with_capacity(type_impls.len());
+ let type_param = hir_ty.as_type_param(db);
+
+ if let Some(tp) = type_param {
+ for tb in tp.trait_bounds(db) {
+ impls.push(Delegee::Bound(BoundCase(tb)));
+ }
+ };
+
+ for imp in type_impls {
+ match imp.trait_(db) {
+ Some(tr) => {
+ if tr.is_visible_from(db, module) {
+ impls.push(Delegee::Impls(ImplCase(tr, imp)))
+ }
+ }
+ None => (),
+ }
+ }
+
+ Some(Field { name, ty, range, impls })
+ }
+}
+
+/// A field that we want to delegate can offer the enclosing struct a
+/// trait to implement in two ways. The first is when the field's type
+/// actually implements the trait; the second is when the field's type
+/// is a type parameter bounded by the trait. We handle these cases
+/// differently, hence the enum.
+enum Delegee {
+ Bound(BoundCase),
+ Impls(ImplCase),
+}
+
+struct BoundCase(hir::Trait);
+struct ImplCase(hir::Trait, hir::Impl);
+
+impl Delegee {
+ fn signature(&self, db: &dyn HirDatabase) -> String {
+ let mut s = String::new();
+
+ let (Delegee::Bound(BoundCase(it)) | Delegee::Impls(ImplCase(it, _))) = self;
+
+ for m in it.module(db).path_to_root(db).iter().rev() {
+ if let Some(name) = m.name(db) {
+ s.push_str(&format!("{}::", name.to_smol_str()));
+ }
+ }
+
+ s.push_str(&it.name(db).to_smol_str());
+ s
+ }
+}
+
+/// A utility struct that represents the enclosing struct.
+struct Struct {
+ strukt: ast::Struct,
+ name: ast::Name,
+}
+
+impl Struct {
+ pub(crate) fn new(s: ast::Struct) -> Option<Self> {
+ let name = s.name()?;
+ Some(Struct { name, strukt: s })
+ }
+
+ pub(crate) fn delegate(&self, field: Field, acc: &mut Assists, ctx: &AssistContext<'_>) {
+ let db = ctx.db();
+ for delegee in &field.impls {
+ // FIXME : We can omit already implemented impl_traits
+ // But we don't know what the &[hir::Type] argument should look like.
+
+ // let trait_ = match delegee {
+ // Delegee::Bound(b) => b.0,
+ // Delegee::Impls(i) => i.1,
+ // };
+
+ // if self.hir_ty.impls_trait(db, trait_, &[]) {
+ // continue;
+ // }
+ let signature = delegee.signature(db);
+ let Some(delegate) = generate_impl(ctx, self, &field.ty, &field.name, delegee) else {
+ continue;
+ };
+
+ acc.add_group(
+ &GroupLabel("Delegate trait impl for field...".to_owned()),
+ AssistId("generate_delegate_trait", ide_db::assists::AssistKind::Generate),
+ format!("Generate delegate impl `{}` for `{}`", signature, field.name),
+ field.range,
+ |builder| {
+ builder.insert(
+ self.strukt.syntax().text_range().end(),
+ format!("\n\n{}", delegate.syntax()),
+ );
+ },
+ );
+ }
+ }
+}
+
+fn generate_impl(
+ ctx: &AssistContext<'_>,
+ strukt: &Struct,
+ field_ty: &ast::Type,
+ field_name: &String,
+ delegee: &Delegee,
+) -> Option<ast::Impl> {
+ let delegate: ast::Impl;
+ let source: ast::Impl;
+ let genpar: Option<ast::GenericParamList>;
+ let db = ctx.db();
+ let base_path = make::path_from_text(&field_ty.to_string().as_str());
+ let s_path = make::ext::ident_path(&strukt.name.to_string());
+
+ match delegee {
+ Delegee::Bound(delegee) => {
+ let in_file = ctx.sema.source(delegee.0.to_owned())?;
+ let source: ast::Trait = in_file.value;
+
+ delegate = make::impl_trait(
+ delegee.0.is_unsafe(db),
+ None,
+ None,
+ strukt.strukt.generic_param_list(),
+ None,
+ delegee.0.is_auto(db),
+ make::ty(&delegee.0.name(db).to_smol_str()),
+ make::ty_path(s_path),
+ source.where_clause(),
+ strukt.strukt.where_clause(),
+ None,
+ )
+ .clone_for_update();
+
+ genpar = source.generic_param_list();
+ let delegate_assoc_items = delegate.get_or_create_assoc_item_list();
+ let gen_args: String =
+ genpar.map_or_else(String::new, |params| params.to_generic_args().to_string());
+
+ // Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths
+ let qualified_path_type = make::path_from_text(&format!(
+ "<{} as {}{}>",
+ base_path.to_string(),
+ delegee.0.name(db).to_smol_str(),
+ gen_args.to_string()
+ ));
+
+ match source.assoc_item_list() {
+ Some(ai) => {
+ ai.assoc_items()
+ .filter(|item| matches!(item, AssocItem::MacroCall(_)).not())
+ .for_each(|item| {
+ let assoc =
+ process_assoc_item(item, qualified_path_type.clone(), &field_name);
+ if let Some(assoc) = assoc {
+ delegate_assoc_items.add_item(assoc);
+ }
+ });
+ }
+ None => {}
+ };
+
+ let target = ctx.sema.scope(strukt.strukt.syntax())?;
+ let source = ctx.sema.scope(source.syntax())?;
+
+ let transform =
+ PathTransform::trait_impl(&target, &source, delegee.0, delegate.clone());
+ transform.apply(&delegate.syntax());
+ }
+ Delegee::Impls(delegee) => {
+ let in_file = ctx.sema.source(delegee.1.to_owned())?;
+ source = in_file.value;
+ delegate = make::impl_trait(
+ delegee.0.is_unsafe(db),
+ source.generic_param_list(),
+ None,
+ None,
+ None,
+ delegee.0.is_auto(db),
+ make::ty(&delegee.0.name(db).to_smol_str()),
+ make::ty_path(s_path),
+ source.where_clause(),
+ strukt.strukt.where_clause(),
+ None,
+ )
+ .clone_for_update();
+ genpar = source.generic_param_list();
+ let delegate_assoc_items = delegate.get_or_create_assoc_item_list();
+ let gen_args: String =
+ genpar.map_or_else(String::new, |params| params.to_generic_args().to_string());
+
+ // Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths
+ let qualified_path_type = make::path_from_text(&format!(
+ "<{} as {}{}>",
+ base_path.to_string().as_str(),
+ delegee.0.name(db).to_smol_str(),
+ gen_args.to_string().as_str()
+ ));
+
+ source
+ .get_or_create_assoc_item_list()
+ .assoc_items()
+ .filter(|item| matches!(item, AssocItem::MacroCall(_)).not())
+ .for_each(|item| {
+ let assoc = process_assoc_item(item, qualified_path_type.clone(), &field_name);
+ if let Some(assoc) = assoc {
+ delegate_assoc_items.add_item(assoc);
+ }
+ });
+
+ let target = ctx.sema.scope(strukt.strukt.syntax())?;
+ let source = ctx.sema.scope(source.syntax())?;
+
+ let transform =
+ PathTransform::trait_impl(&target, &source, delegee.0, delegate.clone());
+ transform.apply(&delegate.syntax());
+ }
+ }
+
+ Some(delegate)
+}
+
+fn process_assoc_item(
+ item: syntax::ast::AssocItem,
+ qual_path_ty: ast::Path,
+ base_name: &str,
+) -> Option<ast::AssocItem> {
+ match item {
+ AssocItem::Const(c) => const_assoc_item(c, qual_path_ty),
+ AssocItem::Fn(f) => func_assoc_item(f, qual_path_ty, base_name),
+ AssocItem::MacroCall(_) => {
+ // FIXME : Handle MacroCall case.
+ // macro_assoc_item(mac, qual_path_ty)
+ None
+ }
+ AssocItem::TypeAlias(ta) => ty_assoc_item(ta, qual_path_ty),
+ }
+}
+
+fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> Option<AssocItem> {
+ let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());
+
+    // We want the rhs of the const assignment to be a qualified path.
+    // The general case for const assignment can be found [here](`https://doc.rust-lang.org/reference/items/constant-items.html`)
+    // The qualified path will have the following generic syntax:
+ // <Base as Trait<GenArgs>>::ConstName;
+ // FIXME : We can't rely on `make::path_qualified` for now but it would be nice to replace the following with it.
+ // make::path_qualified(qual_path_ty, path_expr_segment.as_single_segment().unwrap());
+ let qualpath = qualpath(qual_path_ty, path_expr_segment);
+ let inner =
+ make::item_const(item.visibility(), item.name()?, item.ty()?, make::expr_path(qualpath))
+ .clone_for_update();
+
+ Some(AssocItem::Const(inner))
+}
+
+fn func_assoc_item(
+ item: syntax::ast::Fn,
+ qual_path_ty: Path,
+ base_name: &str,
+) -> Option<AssocItem> {
+ let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());
+ let qualpath = qualpath(qual_path_ty, path_expr_segment);
+
+ let call = match item.param_list() {
+        // Methods and free functions are handled separately.
+        // We check whether the function has a `self` param.
+ Some(l) => match l.self_param() {
+ Some(slf) => {
+ let mut self_kw = make::expr_path(make::path_from_text("self"));
+ self_kw = make::expr_field(self_kw, base_name);
+
+ let tail_expr_self = match slf.kind() {
+ ast::SelfParamKind::Owned => self_kw,
+ ast::SelfParamKind::Ref => make::expr_ref(self_kw, false),
+ ast::SelfParamKind::MutRef => make::expr_ref(self_kw, true),
+ };
+
+ let param_count = l.params().count();
+ let args = convert_param_list_to_arg_list(l).clone_for_update();
+
+ if param_count > 0 {
+ // Add SelfParam and a TOKEN::COMMA
+ ted::insert_all(
+ Position::after(args.l_paren_token()?),
+ vec![
+ NodeOrToken::Node(tail_expr_self.syntax().clone_for_update()),
+ NodeOrToken::Token(make::token(SyntaxKind::WHITESPACE)),
+ NodeOrToken::Token(make::token(SyntaxKind::COMMA)),
+ ],
+ );
+ } else {
+ // Add SelfParam only
+ ted::insert(
+ Position::after(args.l_paren_token()?),
+ NodeOrToken::Node(tail_expr_self.syntax().clone_for_update()),
+ );
+ }
+
+ make::expr_call(make::expr_path(qualpath), args)
+ }
+ None => make::expr_call(make::expr_path(qualpath), convert_param_list_to_arg_list(l)),
+ },
+ None => make::expr_call(
+ make::expr_path(qualpath),
+ convert_param_list_to_arg_list(make::param_list(None, Vec::new())),
+ ),
+ }
+ .clone_for_update();
+
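+    // The generated body is a single expression that forwards to the qualified call,
+    // e.g. `<Base as Trait>::method( &self.field , arg)`.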
+ let body = make::block_expr(vec![], Some(call)).clone_for_update();
+ let func = make::fn_(
+ item.visibility(),
+ item.name()?,
+ item.generic_param_list(),
+ item.where_clause(),
+ item.param_list()?,
+ body,
+ item.ret_type(),
+ item.async_token().is_some(),
+ item.const_token().is_some(),
+ item.unsafe_token().is_some(),
+ )
+ .clone_for_update();
+
+ Some(AssocItem::Fn(func.indent(edit::IndentLevel(1)).clone_for_update()))
+}
+
+fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option<AssocItem> {
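+    // Delegate the associated type to the base type's implementation,
+    // e.g. `type T = <Base as Trait>::T;`.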
+ let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());
+ let qualpath = qualpath(qual_path_ty, path_expr_segment);
+ let ty = make::ty_path(qualpath);
+ let ident = item.name()?.to_string();
+
+ let alias = make::ty_alias(
+ ident.as_str(),
+ item.generic_param_list(),
+ None,
+ item.where_clause(),
+ Some((ty, None)),
+ )
+ .clone_for_update();
+
+ Some(AssocItem::TypeAlias(alias))
+}
+
+fn qualpath(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path {
+ make::path_from_text(&format!("{}::{}", qual_path_ty.to_string(), path_expr_seg.to_string()))
+}
+
+#[cfg(test)]
+mod test {
+
+ use super::*;
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn test_tuple_struct_basic() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+struct Base;
+struct S(B$0ase);
+trait Trait {}
+impl Trait for Base {}
+"#,
+ r#"
+struct Base;
+struct S(Base);
+
+impl Trait for S {}
+trait Trait {}
+impl Trait for Base {}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_struct_struct_basic() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+struct Base;
+struct S {
+ ba$0se : Base
+}
+trait Trait {}
+impl Trait for Base {}
+"#,
+ r#"
+struct Base;
+struct S {
+ base : Base
+}
+
+impl Trait for S {}
+trait Trait {}
+impl Trait for Base {}
+"#,
+ )
+ }
+
+    // Structs are by definition populated with fields.
+    // However, the user can invoke this assist while still editing,
+    // so we assert that it is not applicable in that case.
+ #[test]
+ fn test_yet_empty_struct() {
+ check_assist_not_applicable(
+ generate_delegate_trait,
+ r#"
+struct Base;
+struct S {
+ $0
+}
+
+impl Trait for S {}
+trait Trait {}
+impl Trait for Base {}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_yet_unspecified_field_type() {
+ check_assist_not_applicable(
+ generate_delegate_trait,
+ r#"
+struct Base;
+struct S {
+ ab$0c
+}
+
+impl Trait for S {}
+trait Trait {}
+impl Trait for Base {}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_unsafe_trait() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+struct Base;
+struct S {
+ ba$0se : Base
+}
+unsafe trait Trait {}
+unsafe impl Trait for Base {}
+"#,
+ r#"
+struct Base;
+struct S {
+ base : Base
+}
+
+unsafe impl Trait for S {}
+unsafe trait Trait {}
+unsafe impl Trait for Base {}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_unsafe_trait_with_unsafe_fn() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+struct Base;
+struct S {
+ ba$0se: Base,
+}
+
+unsafe trait Trait {
+ unsafe fn a_func();
+ unsafe fn a_method(&self);
+}
+unsafe impl Trait for Base {
+ unsafe fn a_func() {}
+ unsafe fn a_method(&self) {}
+}
+"#,
+ r#"
+struct Base;
+struct S {
+ base: Base,
+}
+
+unsafe impl Trait for S {
+ unsafe fn a_func() {
+ <Base as Trait>::a_func()
+ }
+
+ unsafe fn a_method(&self) {
+ <Base as Trait>::a_method( &self.base )
+ }
+}
+
+unsafe trait Trait {
+ unsafe fn a_func();
+ unsafe fn a_method(&self);
+}
+unsafe impl Trait for Base {
+ unsafe fn a_func() {}
+ unsafe fn a_method(&self) {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_struct_with_where_clause() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+trait AnotherTrait {}
+struct S<T>
+where
+ T: AnotherTrait,
+{
+ b$0 : T,
+}"#,
+ r#"
+trait AnotherTrait {}
+struct S<T>
+where
+ T: AnotherTrait,
+{
+ b : T,
+}
+
+impl<T> AnotherTrait for S<T>
+where
+ T: AnotherTrait,
+{}"#,
+ );
+ }
+
+ #[test]
+ fn test_complex_without_where() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+trait Trait<'a, T, const C: usize> {
+ type AssocType;
+ const AssocConst: usize;
+ fn assoc_fn(p: ());
+ fn assoc_method(&self, p: ());
+}
+
+struct Base;
+struct S {
+ field$0: Base
+}
+
+impl<'a, T, const C: usize> Trait<'a, T, C> for Base {
+ type AssocType = ();
+ const AssocConst: usize = 0;
+ fn assoc_fn(p: ()) {}
+ fn assoc_method(&self, p: ()) {}
+}
+"#,
+ r#"
+trait Trait<'a, T, const C: usize> {
+ type AssocType;
+ const AssocConst: usize;
+ fn assoc_fn(p: ());
+ fn assoc_method(&self, p: ());
+}
+
+struct Base;
+struct S {
+ field: Base
+}
+
+impl<'a, T, const C: usize> Trait<'a, T, C> for S {
+ type AssocType = <Base as Trait<'a, T, C>>::AssocType;
+
+ const AssocConst: usize = <Base as Trait<'a, T, C>>::AssocConst;
+
+ fn assoc_fn(p: ()) {
+ <Base as Trait<'a, T, C>>::assoc_fn(p)
+ }
+
+ fn assoc_method(&self, p: ()) {
+ <Base as Trait<'a, T, C>>::assoc_method( &self.field , p)
+ }
+}
+
+impl<'a, T, const C: usize> Trait<'a, T, C> for Base {
+ type AssocType = ();
+ const AssocConst: usize = 0;
+ fn assoc_fn(p: ()) {}
+ fn assoc_method(&self, p: ()) {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_complex_two() {
+ check_assist(
+ generate_delegate_trait,
+ r"
+trait AnotherTrait {}
+
+trait Trait<'a, T, const C: usize> {
+ type AssocType;
+ const AssocConst: usize;
+ fn assoc_fn(p: ());
+ fn assoc_method(&self, p: ());
+}
+
+struct Base;
+struct S {
+ fi$0eld: Base,
+}
+
+impl<'b, C, const D: usize> Trait<'b, C, D> for Base
+where
+ C: AnotherTrait,
+{
+ type AssocType = ();
+ const AssocConst: usize = 0;
+ fn assoc_fn(p: ()) {}
+ fn assoc_method(&self, p: ()) {}
+}",
+ r#"
+trait AnotherTrait {}
+
+trait Trait<'a, T, const C: usize> {
+ type AssocType;
+ const AssocConst: usize;
+ fn assoc_fn(p: ());
+ fn assoc_method(&self, p: ());
+}
+
+struct Base;
+struct S {
+ field: Base,
+}
+
+impl<'b, C, const D: usize> Trait<'b, C, D> for S
+where
+ C: AnotherTrait,
+{
+ type AssocType = <Base as Trait<'b, C, D>>::AssocType;
+
+ const AssocConst: usize = <Base as Trait<'b, C, D>>::AssocConst;
+
+ fn assoc_fn(p: ()) {
+ <Base as Trait<'b, C, D>>::assoc_fn(p)
+ }
+
+ fn assoc_method(&self, p: ()) {
+ <Base as Trait<'b, C, D>>::assoc_method( &self.field , p)
+ }
+}
+
+impl<'b, C, const D: usize> Trait<'b, C, D> for Base
+where
+ C: AnotherTrait,
+{
+ type AssocType = ();
+ const AssocConst: usize = 0;
+ fn assoc_fn(p: ()) {}
+ fn assoc_method(&self, p: ()) {}
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_complex_three() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+trait AnotherTrait {}
+trait YetAnotherTrait {}
+
+struct StructImplsAll();
+impl AnotherTrait for StructImplsAll {}
+impl YetAnotherTrait for StructImplsAll {}
+
+trait Trait<'a, T, const C: usize> {
+ type A;
+ const ASSOC_CONST: usize = C;
+ fn assoc_fn(p: ());
+ fn assoc_method(&self, p: ());
+}
+
+struct Base;
+struct S {
+ fi$0eld: Base,
+}
+
+impl<'b, A: AnotherTrait + YetAnotherTrait, const B: usize> Trait<'b, A, B> for Base
+where
+ A: AnotherTrait,
+{
+ type A = i32;
+
+ const ASSOC_CONST: usize = B;
+
+ fn assoc_fn(p: ()) {}
+
+ fn assoc_method(&self, p: ()) {}
+}
+"#,
+ r#"
+trait AnotherTrait {}
+trait YetAnotherTrait {}
+
+struct StructImplsAll();
+impl AnotherTrait for StructImplsAll {}
+impl YetAnotherTrait for StructImplsAll {}
+
+trait Trait<'a, T, const C: usize> {
+ type A;
+ const ASSOC_CONST: usize = C;
+ fn assoc_fn(p: ());
+ fn assoc_method(&self, p: ());
+}
+
+struct Base;
+struct S {
+ field: Base,
+}
+
+impl<'b, A: AnotherTrait + YetAnotherTrait, const B: usize> Trait<'b, A, B> for S
+where
+ A: AnotherTrait,
+{
+ type A = <Base as Trait<'b, A, B>>::A;
+
+ const ASSOC_CONST: usize = <Base as Trait<'b, A, B>>::ASSOC_CONST;
+
+ fn assoc_fn(p: ()) {
+ <Base as Trait<'b, A, B>>::assoc_fn(p)
+ }
+
+ fn assoc_method(&self, p: ()) {
+ <Base as Trait<'b, A, B>>::assoc_method( &self.field , p)
+ }
+}
+
+impl<'b, A: AnotherTrait + YetAnotherTrait, const B: usize> Trait<'b, A, B> for Base
+where
+ A: AnotherTrait,
+{
+ type A = i32;
+
+ const ASSOC_CONST: usize = B;
+
+ fn assoc_fn(p: ()) {}
+
+ fn assoc_method(&self, p: ()) {}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_type_bound() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+trait AnotherTrait {}
+struct S<T>
+where
+ T: AnotherTrait,
+{
+ b$0: T,
+}"#,
+ r#"
+trait AnotherTrait {}
+struct S<T>
+where
+ T: AnotherTrait,
+{
+ b: T,
+}
+
+impl<T> AnotherTrait for S<T>
+where
+ T: AnotherTrait,
+{}"#,
+ );
+ }
+
+ #[test]
+ fn test_docstring_example() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+}
+struct A;
+impl SomeTrait for A {
+ type T = u32;
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+ fn method_(&mut self) -> bool {
+ false
+ }
+}
+struct B {
+ a$0: A,
+}
+"#,
+ r#"
+trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+}
+struct A;
+impl SomeTrait for A {
+ type T = u32;
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+ fn method_(&mut self) -> bool {
+ false
+ }
+}
+struct B {
+ a: A,
+}
+
+impl SomeTrait for B {
+ type T = <A as SomeTrait>::T;
+
+ fn fn_(arg: u32) -> u32 {
+ <A as SomeTrait>::fn_(arg)
+ }
+
+ fn method_(&mut self) -> bool {
+ <A as SomeTrait>::method_( &mut self.a )
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn import_from_other_mod() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+mod some_module {
+ pub trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+ }
+ pub struct A;
+ impl SomeTrait for A {
+ type T = u32;
+
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+
+ fn method_(&mut self) -> bool {
+ false
+ }
+ }
+}
+
+struct B {
+ a$0: some_module::A,
+}"#,
+ r#"
+mod some_module {
+ pub trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+ }
+ pub struct A;
+ impl SomeTrait for A {
+ type T = u32;
+
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+
+ fn method_(&mut self) -> bool {
+ false
+ }
+ }
+}
+
+struct B {
+ a: some_module::A,
+}
+
+impl some_module::SomeTrait for B {
+ type T = <some_module::A as some_module::SomeTrait>::T;
+
+ fn fn_(arg: u32) -> u32 {
+ <some_module::A as some_module::SomeTrait>::fn_(arg)
+ }
+
+ fn method_(&mut self) -> bool {
+ <some_module::A as some_module::SomeTrait>::method_( &mut self.a )
+ }
+}"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs
index 78ac2eb30..747f70f9f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs
@@ -1,7 +1,6 @@
use syntax::{
- ast::{self, edit::IndentLevel, AstNode, HasAttrs},
- SyntaxKind::{COMMENT, WHITESPACE},
- TextSize,
+ ast::{self, edit_in_place::AttrsOwnerEdit, make, AstNode, HasAttrs},
+ T,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -27,48 +26,37 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let cap = ctx.config.snippet_cap?;
let nominal = ctx.find_node_at_offset::<ast::Adt>()?;
- let node_start = derive_insertion_offset(&nominal)?;
let target = nominal.syntax().text_range();
- acc.add(
- AssistId("generate_derive", AssistKind::Generate),
- "Add `#[derive]`",
- target,
- |builder| {
- let derive_attr = nominal
- .attrs()
- .filter_map(|x| x.as_simple_call())
- .filter(|(name, _arg)| name == "derive")
- .map(|(_name, arg)| arg)
- .next();
- match derive_attr {
- None => {
- let indent_level = IndentLevel::from_node(nominal.syntax());
- builder.insert_snippet(
- cap,
- node_start,
- format!("#[derive($0)]\n{indent_level}"),
- );
- }
- Some(tt) => {
- // Just move the cursor.
- builder.insert_snippet(
- cap,
- tt.syntax().text_range().end() - TextSize::of(')'),
- "$0",
- )
- }
- };
- },
- )
-}
+ acc.add(AssistId("generate_derive", AssistKind::Generate), "Add `#[derive]`", target, |edit| {
+ let derive_attr = nominal
+ .attrs()
+ .filter_map(|x| x.as_simple_call())
+ .filter(|(name, _arg)| name == "derive")
+ .map(|(_name, arg)| arg)
+ .next();
+ match derive_attr {
+ None => {
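+                // No existing `#[derive(...)]`: build an empty `#[derive()]` outer
+                // attribute, attach it to the item, and put the tabstop before the `)`.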
+ let derive = make::attr_outer(make::meta_token_tree(
+ make::ext::ident_path("derive"),
+ make::token_tree(T!['('], vec![]).clone_for_update(),
+ ))
+ .clone_for_update();
+
+ let nominal = edit.make_mut(nominal);
+ nominal.add_attr(derive.clone());
-// Insert `derive` after doc comments.
-fn derive_insertion_offset(nominal: &ast::Adt) -> Option<TextSize> {
- let non_ws_child = nominal
- .syntax()
- .children_with_tokens()
- .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?;
- Some(non_ws_child.text_range().start())
+ edit.add_tabstop_before_token(
+ cap,
+ derive.meta().unwrap().token_tree().unwrap().r_paren_token().unwrap(),
+ );
+ }
+ Some(tt) => {
+ // Just move the cursor.
+ let tt = edit.make_mut(tt);
+ edit.add_tabstop_before_token(cap, tt.right_delimiter_token().unwrap());
+ }
+ };
+ })
}
#[cfg(test)]
@@ -115,6 +103,38 @@ mod m {
}
#[test]
+ fn add_derive_existing_with_brackets() {
+ check_assist(
+ generate_derive,
+ "
+#[derive[Clone]]
+struct Foo { a: i32$0, }
+",
+ "
+#[derive[Clone$0]]
+struct Foo { a: i32, }
+",
+ );
+ }
+
+ #[test]
+ fn add_derive_existing_missing_delimiter() {
+        // Since `#[derive]` isn't a simple attr call (i.e. `#[derive()]`),
+        // we don't consider it a proper derive attr and generate a new
+        // one instead.
+ check_assist(
+ generate_derive,
+ "
+#[derive]
+struct Foo { a: i32$0, }",
+ "
+#[derive]
+#[derive($0)]
+struct Foo { a: i32, }",
+ );
+ }
+
+ #[test]
fn add_derive_new_with_doc_comment() {
check_assist(
generate_derive,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
index c579f6780..5b13e01b1 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
@@ -623,7 +623,9 @@ fn fn_generic_params(
fn params_and_where_preds_in_scope(
ctx: &AssistContext<'_>,
) -> (Vec<ast::GenericParam>, Vec<ast::WherePred>) {
- let Some(body) = containing_body(ctx) else { return Default::default(); };
+ let Some(body) = containing_body(ctx) else {
+ return Default::default();
+ };
let mut generic_params = Vec::new();
let mut where_clauses = Vec::new();
@@ -1876,7 +1878,6 @@ where
#[test]
fn add_function_with_fn_arg() {
- // FIXME: The argument in `bar` is wrong.
check_assist(
generate_function,
r"
@@ -1897,7 +1898,7 @@ fn foo() {
bar(Baz::new);
}
-fn bar(new: fn) ${0:-> _} {
+fn bar(new: fn() -> Baz) ${0:-> _} {
todo!()
}
",
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
index dd6bbd84a..9c9478b04 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
@@ -1,4 +1,4 @@
-use ide_db::famous_defs::FamousDefs;
+use ide_db::{famous_defs::FamousDefs, source_change::SourceChangeBuilder};
use stdx::{format_to, to_lower_snake_case};
use syntax::{
ast::{self, AstNode, HasName, HasVisibility},
@@ -10,6 +10,66 @@ use crate::{
AssistContext, AssistId, AssistKind, Assists, GroupLabel,
};
+// Assist: generate_setter
+//
+// Generate a setter method.
+//
+// ```
+// struct Person {
+// nam$0e: String,
+// }
+// ```
+// ->
+// ```
+// struct Person {
+// name: String,
+// }
+//
+// impl Person {
+// fn $0set_name(&mut self, name: String) {
+// self.name = name;
+// }
+// }
+// ```
+pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    // This assist can work in two modes:
+    // - acting upon a selection of record fields, or
+    // - acting upon a single record field.
+    //
+    // This is the only part where the implementation diverges;
+    // the subsequent code is generic over both modes.
+
+ let (strukt, info_of_record_fields, mut fn_names) = extract_and_parse(ctx, AssistType::Set)?;
+
+ // No record fields to do work on :(
+ if info_of_record_fields.len() == 0 {
+ return None;
+ }
+
+ // Prepend set_ to fn names.
+ fn_names.iter_mut().for_each(|name| *name = format!("set_{}", name));
+
+ // Return early if we've found an existing fn
+ let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &fn_names)?;
+
+ // Computing collective text range of all record fields in selected region
+ let target: TextRange = info_of_record_fields
+ .iter()
+ .map(|record_field_info| record_field_info.target)
+ .reduce(|acc, target| acc.cover(target))?;
+
+ let setter_info = AssistInfo { impl_def, strukt, assist_type: AssistType::Set };
+
+ acc.add_group(
+ &GroupLabel("Generate getter/setter".to_owned()),
+ AssistId("generate_setter", AssistKind::Generate),
+ "Generate a setter method",
+ target,
+ |builder| build_source_change(builder, ctx, info_of_record_fields, setter_info),
+ );
+ Some(())
+}
+
// Assist: generate_getter
//
// Generate a getter method.
@@ -83,10 +143,16 @@ struct RecordFieldInfo {
target: TextRange,
}
-struct GetterInfo {
+struct AssistInfo {
impl_def: Option<ast::Impl>,
strukt: ast::Struct,
- mutable: bool,
+ assist_type: AssistType,
+}
+
+enum AssistType {
+ Get,
+ MutGet,
+ Set,
}
pub(crate) fn generate_getter_impl(
@@ -94,40 +160,8 @@ pub(crate) fn generate_getter_impl(
ctx: &AssistContext<'_>,
mutable: bool,
) -> Option<()> {
- // This if condition denotes two modes this assist can work in:
- // - First is acting upon selection of record fields
- // - Next is acting upon a single record field
- //
- // This is the only part where implementation diverges a bit,
- // subsequent code is generic for both of these modes
-
- let (strukt, info_of_record_fields, fn_names) = if !ctx.has_empty_selection() {
- // Selection Mode
- let node = ctx.covering_element();
-
- let node = match node {
- syntax::NodeOrToken::Node(n) => n,
- syntax::NodeOrToken::Token(t) => t.parent()?,
- };
-
- let parent_struct = node.ancestors().find_map(ast::Struct::cast)?;
-
- let (info_of_record_fields, field_names) =
- extract_and_parse_record_fields(&parent_struct, ctx.selection_trimmed(), mutable)?;
-
- (parent_struct, info_of_record_fields, field_names)
- } else {
- // Single Record Field mode
- let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
- let field = ctx.find_node_at_offset::<ast::RecordField>()?;
-
- let record_field_info = parse_record_field(field, mutable)?;
-
- let fn_name = record_field_info.fn_name.clone();
-
- (strukt, vec![record_field_info], vec![fn_name])
- };
-
+ let (strukt, info_of_record_fields, fn_names) =
+ extract_and_parse(ctx, if mutable { AssistType::MutGet } else { AssistType::Get })?;
// No record fields to do work on :(
if info_of_record_fields.len() == 0 {
return None;
@@ -147,98 +181,30 @@ pub(crate) fn generate_getter_impl(
.map(|record_field_info| record_field_info.target)
.reduce(|acc, target| acc.cover(target))?;
- let getter_info = GetterInfo { impl_def, strukt, mutable };
+ let getter_info = AssistInfo {
+ impl_def,
+ strukt,
+ assist_type: if mutable { AssistType::MutGet } else { AssistType::Get },
+ };
acc.add_group(
&GroupLabel("Generate getter/setter".to_owned()),
AssistId(id, AssistKind::Generate),
label,
target,
- |builder| {
- let record_fields_count = info_of_record_fields.len();
-
- let mut buf = String::with_capacity(512);
-
- // Check if an impl exists
- if let Some(impl_def) = &getter_info.impl_def {
- // Check if impl is empty
- if let Some(assoc_item_list) = impl_def.assoc_item_list() {
- if assoc_item_list.assoc_items().next().is_some() {
- // If not empty then only insert a new line
- buf.push('\n');
- }
- }
- }
-
- for (i, record_field_info) in info_of_record_fields.iter().enumerate() {
- // this buf inserts a newline at the end of a getter
- // automatically, if one wants to add one more newline
- // for separating it from other assoc items, that needs
- // to be handled separately
- let mut getter_buf =
- generate_getter_from_info(ctx, &getter_info, record_field_info);
-
- // Insert `$0` only for last getter we generate
- if i == record_fields_count - 1 {
- if ctx.config.snippet_cap.is_some() {
- getter_buf = getter_buf.replacen("fn ", "fn $0", 1);
- }
- }
-
- // For first element we do not merge with '\n', as
- // that can be inserted by impl_def check defined
- // above, for other cases which are:
- //
- // - impl exists but it empty, here we would ideally
- // not want to keep newline between impl <struct> {
- // and fn <fn-name>() { line
- //
- // - next if impl itself does not exist, in this
- // case we ourselves generate a new impl and that
- // again ends up with the same reasoning as above
- // for not keeping newline
- if i == 0 {
- buf = buf + &getter_buf;
- } else {
- buf = buf + "\n" + &getter_buf;
- }
-
- // We don't insert a new line at the end of
- // last getter as it will end up in the end
- // of an impl where we would not like to keep
- // getter and end of impl ( i.e. `}` ) with an
- // extra line for no reason
- if i < record_fields_count - 1 {
- buf = buf + "\n";
- }
- }
-
- let start_offset = getter_info
- .impl_def
- .as_ref()
- .and_then(|impl_def| find_impl_block_end(impl_def.to_owned(), &mut buf))
- .unwrap_or_else(|| {
- buf = generate_impl_text(&ast::Adt::Struct(getter_info.strukt.clone()), &buf);
- getter_info.strukt.syntax().text_range().end()
- });
-
- match ctx.config.snippet_cap {
- Some(cap) => builder.insert_snippet(cap, start_offset, buf),
- None => builder.insert(start_offset, buf),
- }
- },
+ |builder| build_source_change(builder, ctx, info_of_record_fields, getter_info),
)
}
fn generate_getter_from_info(
ctx: &AssistContext<'_>,
- info: &GetterInfo,
+ info: &AssistInfo,
record_field_info: &RecordFieldInfo,
) -> String {
let mut buf = String::with_capacity(512);
let vis = info.strukt.visibility().map_or(String::new(), |v| format!("{v} "));
- let (ty, body) = if info.mutable {
+ let (ty, body) = if matches!(info.assist_type, AssistType::MutGet) {
(
format!("&mut {}", record_field_info.field_ty),
format!("&mut self.{}", record_field_info.field_name),
@@ -273,7 +239,7 @@ fn generate_getter_from_info(
}}",
vis,
record_field_info.fn_name,
- info.mutable.then_some("mut ").unwrap_or_default(),
+ matches!(info.assist_type, AssistType::MutGet).then_some("mut ").unwrap_or_default(),
ty,
body,
);
@@ -281,10 +247,58 @@ fn generate_getter_from_info(
buf
}
+fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldInfo) -> String {
+ let mut buf = String::with_capacity(512);
+ let strukt = &info.strukt;
+ let fn_name = &record_field_info.fn_name;
+ let field_ty = &record_field_info.field_ty;
+ let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} "));
+ format_to!(
+ buf,
+ " {vis}fn set_{fn_name}(&mut self, {fn_name}: {field_ty}) {{
+ self.{fn_name} = {fn_name};
+ }}"
+ );
+
+ buf
+}
+
+fn extract_and_parse(
+ ctx: &AssistContext<'_>,
+ assist_type: AssistType,
+) -> Option<(ast::Struct, Vec<RecordFieldInfo>, Vec<String>)> {
+    // This if condition distinguishes the two modes this assist can work in:
+    // - acting upon a selection of record fields, or
+    // - acting upon a single record field.
+ if !ctx.has_empty_selection() {
+ // Selection Mode
+ let node = ctx.covering_element();
+
+ let node = match node {
+ syntax::NodeOrToken::Node(n) => n,
+ syntax::NodeOrToken::Token(t) => t.parent()?,
+ };
+
+ let parent_struct = node.ancestors().find_map(ast::Struct::cast)?;
+
+ let (info_of_record_fields, field_names) =
+ extract_and_parse_record_fields(&parent_struct, ctx.selection_trimmed(), &assist_type)?;
+
+ return Some((parent_struct, info_of_record_fields, field_names));
+ }
+
+ // Single Record Field mode
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+ let field = ctx.find_node_at_offset::<ast::RecordField>()?;
+ let record_field_info = parse_record_field(field, &assist_type)?;
+ let fn_name = record_field_info.fn_name.clone();
+ Some((strukt, vec![record_field_info], vec![fn_name]))
+}
+
fn extract_and_parse_record_fields(
node: &ast::Struct,
selection_range: TextRange,
- mutable: bool,
+ assist_type: &AssistType,
) -> Option<(Vec<RecordFieldInfo>, Vec<String>)> {
let mut field_names: Vec<String> = vec![];
let field_list = node.field_list()?;
@@ -295,7 +309,7 @@ fn extract_and_parse_record_fields(
.fields()
.filter_map(|record_field| {
if selection_range.contains_range(record_field.syntax().text_range()) {
- let record_field_info = parse_record_field(record_field, mutable)?;
+ let record_field_info = parse_record_field(record_field, assist_type)?;
field_names.push(record_field_info.fn_name.clone());
return Some(record_field_info);
}
@@ -316,12 +330,15 @@ fn extract_and_parse_record_fields(
}
}
-fn parse_record_field(record_field: ast::RecordField, mutable: bool) -> Option<RecordFieldInfo> {
+fn parse_record_field(
+ record_field: ast::RecordField,
+ assist_type: &AssistType,
+) -> Option<RecordFieldInfo> {
let field_name = record_field.name()?;
let field_ty = record_field.ty()?;
let mut fn_name = to_lower_snake_case(&field_name.to_string());
- if mutable {
+ if matches!(assist_type, AssistType::MutGet) {
format_to!(fn_name, "_mut");
}
@@ -330,8 +347,89 @@ fn parse_record_field(record_field: ast::RecordField, mutable: bool) -> Option<R
Some(RecordFieldInfo { field_name, field_ty, fn_name, target })
}
+fn build_source_change(
+ builder: &mut SourceChangeBuilder,
+ ctx: &AssistContext<'_>,
+ info_of_record_fields: Vec<RecordFieldInfo>,
+ assist_info: AssistInfo,
+) {
+ let record_fields_count = info_of_record_fields.len();
+
+ let mut buf = String::with_capacity(512);
+
+ // Check if an impl exists
+ if let Some(impl_def) = &assist_info.impl_def {
+ // Check if impl is empty
+ if let Some(assoc_item_list) = impl_def.assoc_item_list() {
+ if assoc_item_list.assoc_items().next().is_some() {
+                // If it is not empty, insert a leading newline.
+ buf.push('\n');
+ }
+ }
+ }
+
+ for (i, record_field_info) in info_of_record_fields.iter().enumerate() {
+        // The generated getter buffer automatically ends with a newline;
+        // if one more newline is wanted to separate it from other assoc
+        // items, that needs to be handled separately.
+ let mut getter_buf = match assist_info.assist_type {
+ AssistType::Set => generate_setter_from_info(&assist_info, record_field_info),
+ _ => generate_getter_from_info(ctx, &assist_info, record_field_info),
+ };
+
+ // Insert `$0` only for last getter we generate
+ if i == record_fields_count - 1 {
+ if ctx.config.snippet_cap.is_some() {
+ getter_buf = getter_buf.replacen("fn ", "fn $0", 1);
+ }
+ }
+
+        // For the first element we do not prepend a '\n', as one may
+        // already have been inserted by the impl_def check above.
+        // The other cases are:
+        //
+        // - the impl exists but is empty: here we would ideally not
+        // want a newline between the `impl <struct> {` line and the
+        // `fn <fn-name>() {` line
+        //
+        // - the impl itself does not exist: in this case we generate
+        // a new impl ourselves, and the same reasoning as above
+        // applies for not keeping a newline
+ if i == 0 {
+ buf = buf + &getter_buf;
+ } else {
+ buf = buf + "\n" + &getter_buf;
+ }
+
+        // We don't insert a newline at the end of the last getter,
+        // as it would otherwise sit right before the end of the impl
+        // (i.e. `}`) and leave an extra blank line for no reason.
+ if i < record_fields_count - 1 {
+ buf = buf + "\n";
+ }
+ }
+
+ let start_offset = assist_info
+ .impl_def
+ .as_ref()
+ .and_then(|impl_def| find_impl_block_end(impl_def.to_owned(), &mut buf))
+ .unwrap_or_else(|| {
+ buf = generate_impl_text(&ast::Adt::Struct(assist_info.strukt.clone()), &buf);
+ assist_info.strukt.syntax().text_range().end()
+ });
+
+ match ctx.config.snippet_cap {
+ Some(cap) => builder.insert_snippet(cap, start_offset, buf),
+ None => builder.insert(start_offset, buf),
+ }
+}
+
#[cfg(test)]
-mod tests {
+mod tests_getter {
use crate::tests::{check_assist, check_assist_no_snippet_cap, check_assist_not_applicable};
use super::*;
@@ -812,3 +910,105 @@ impl Context {
);
}
}
+
+#[cfg(test)]
+mod tests_setter {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ fn check_not_applicable(ra_fixture: &str) {
+ check_assist_not_applicable(generate_setter, ra_fixture)
+ }
+
+ #[test]
+ fn test_generate_setter_from_field() {
+ check_assist(
+ generate_setter,
+ r#"
+struct Person<T: Clone> {
+ dat$0a: T,
+}"#,
+ r#"
+struct Person<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Person<T> {
+ fn $0set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_setter_already_implemented() {
+ check_not_applicable(
+ r#"
+struct Person<T: Clone> {
+ dat$0a: T,
+}
+
+impl<T: Clone> Person<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_setter_from_field_with_visibility_marker() {
+ check_assist(
+ generate_setter,
+ r#"
+pub(crate) struct Person<T: Clone> {
+ dat$0a: T,
+}"#,
+ r#"
+pub(crate) struct Person<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Person<T> {
+ pub(crate) fn $0set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_multiple_generate_setter() {
+ check_assist(
+ generate_setter,
+ r#"
+struct Context<T: Clone> {
+ data: T,
+ cou$0nt: usize,
+}
+
+impl<T: Clone> Context<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ r#"
+struct Context<T: Clone> {
+ data: T,
+ count: usize,
+}
+
+impl<T: Clone> Context<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+
+ fn $0set_count(&mut self, count: usize) {
+ self.count = count;
+ }
+}"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs
deleted file mode 100644
index 62f72df1c..000000000
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs
+++ /dev/null
@@ -1,175 +0,0 @@
-use stdx::{format_to, to_lower_snake_case};
-use syntax::ast::{self, AstNode, HasName, HasVisibility};
-
-use crate::{
- utils::{find_impl_block_end, find_struct_impl, generate_impl_text},
- AssistContext, AssistId, AssistKind, Assists, GroupLabel,
-};
-
-// Assist: generate_setter
-//
-// Generate a setter method.
-//
-// ```
-// struct Person {
-// nam$0e: String,
-// }
-// ```
-// ->
-// ```
-// struct Person {
-// name: String,
-// }
-//
-// impl Person {
-// fn set_name(&mut self, name: String) {
-// self.name = name;
-// }
-// }
-// ```
-pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
- let field = ctx.find_node_at_offset::<ast::RecordField>()?;
-
- let field_name = field.name()?;
- let field_ty = field.ty()?;
-
- // Return early if we've found an existing fn
- let fn_name = to_lower_snake_case(&field_name.to_string());
- let impl_def =
- find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &[format!("set_{fn_name}")])?;
-
- let target = field.syntax().text_range();
- acc.add_group(
- &GroupLabel("Generate getter/setter".to_owned()),
- AssistId("generate_setter", AssistKind::Generate),
- "Generate a setter method",
- target,
- |builder| {
- let mut buf = String::with_capacity(512);
-
- if impl_def.is_some() {
- buf.push('\n');
- }
-
- let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} "));
- format_to!(
- buf,
- " {vis}fn set_{fn_name}(&mut self, {fn_name}: {field_ty}) {{
- self.{fn_name} = {fn_name};
- }}"
- );
-
- let start_offset = impl_def
- .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf))
- .unwrap_or_else(|| {
- buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf);
- strukt.syntax().text_range().end()
- });
-
- builder.insert(start_offset, buf);
- },
- )
-}
-
-#[cfg(test)]
-mod tests {
- use crate::tests::{check_assist, check_assist_not_applicable};
-
- use super::*;
-
- fn check_not_applicable(ra_fixture: &str) {
- check_assist_not_applicable(generate_setter, ra_fixture)
- }
-
- #[test]
- fn test_generate_setter_from_field() {
- check_assist(
- generate_setter,
- r#"
-struct Person<T: Clone> {
- dat$0a: T,
-}"#,
- r#"
-struct Person<T: Clone> {
- data: T,
-}
-
-impl<T: Clone> Person<T> {
- fn set_data(&mut self, data: T) {
- self.data = data;
- }
-}"#,
- );
- }
-
- #[test]
- fn test_generate_setter_already_implemented() {
- check_not_applicable(
- r#"
-struct Person<T: Clone> {
- dat$0a: T,
-}
-
-impl<T: Clone> Person<T> {
- fn set_data(&mut self, data: T) {
- self.data = data;
- }
-}"#,
- );
- }
-
- #[test]
- fn test_generate_setter_from_field_with_visibility_marker() {
- check_assist(
- generate_setter,
- r#"
-pub(crate) struct Person<T: Clone> {
- dat$0a: T,
-}"#,
- r#"
-pub(crate) struct Person<T: Clone> {
- data: T,
-}
-
-impl<T: Clone> Person<T> {
- pub(crate) fn set_data(&mut self, data: T) {
- self.data = data;
- }
-}"#,
- );
- }
-
- #[test]
- fn test_multiple_generate_setter() {
- check_assist(
- generate_setter,
- r#"
-struct Context<T: Clone> {
- data: T,
- cou$0nt: usize,
-}
-
-impl<T: Clone> Context<T> {
- fn set_data(&mut self, data: T) {
- self.data = data;
- }
-}"#,
- r#"
-struct Context<T: Clone> {
- data: T,
- count: usize,
-}
-
-impl<T: Clone> Context<T> {
- fn set_data(&mut self, data: T) {
- self.data = data;
- }
-
- fn set_count(&mut self, count: usize) {
- self.count = count;
- }
-}"#,
- );
- }
-}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
new file mode 100644
index 000000000..0f67380d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
@@ -0,0 +1,429 @@
+use crate::assist_context::{AssistContext, Assists};
+use ide_db::assists::AssistId;
+use syntax::{
+ ast::{self, edit::IndentLevel, make, HasGenericParams, HasVisibility},
+ ted, AstNode, SyntaxKind,
+};
+
+// NOTE:
+// We generate erroneous code if a function is declared const (E0379).
+// This is left to the user to correct, as our only alternative would be to
+// remove the function completely, which we should not do.
+
+// Assist: generate_trait_from_impl
+//
+// Generate trait for an already defined inherent impl and convert impl to a trait impl.
+//
+// ```
+// struct Foo<const N: usize>([i32; N]);
+//
+// macro_rules! const_maker {
+// ($t:ty, $v:tt) => {
+// const CONST: $t = $v;
+// };
+// }
+//
+// impl<const N: usize> Fo$0o<N> {
+// // Used as an associated constant.
+// const CONST_ASSOC: usize = N * 4;
+//
+// fn create() -> Option<()> {
+// Some(())
+// }
+//
+// const_maker! {i32, 7}
+// }
+// ```
+// ->
+// ```
+// struct Foo<const N: usize>([i32; N]);
+//
+// macro_rules! const_maker {
+// ($t:ty, $v:tt) => {
+// const CONST: $t = $v;
+// };
+// }
+//
+// trait ${0:TraitName}<const N: usize> {
+// // Used as an associated constant.
+// const CONST_ASSOC: usize = N * 4;
+//
+// fn create() -> Option<()>;
+//
+// const_maker! {i32, 7}
+// }
+//
+// impl<const N: usize> ${0:TraitName}<N> for Foo<N> {
+// // Used as an associated constant.
+// const CONST_ASSOC: usize = N * 4;
+//
+// fn create() -> Option<()> {
+// Some(())
+// }
+//
+// const_maker! {i32, 7}
+// }
+// ```
+pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ // Get AST Node
+ let impl_ast = ctx.find_node_at_offset::<ast::Impl>()?;
+
+    // Check that the cursor is to the left of the assoc item list's L_CURLY;
+    // if there is no L_CURLY, return.
+ let l_curly = impl_ast.assoc_item_list()?.l_curly_token()?;
+
+ let cursor_offset = ctx.offset();
+ let l_curly_offset = l_curly.text_range();
+ if cursor_offset >= l_curly_offset.start() {
+ return None;
+ }
+
+    // If the impl is not inherent, there is no need to go any further.
+ if impl_ast.for_token().is_some() {
+ return None;
+ }
+
+ let assoc_items = impl_ast.assoc_item_list()?;
+ let first_element = assoc_items.assoc_items().next();
+ if first_element.is_none() {
+ // No reason for an assist.
+ return None;
+ }
+
+ let impl_name = impl_ast.self_ty()?;
+
+ acc.add(
+ AssistId("generate_trait_from_impl", ide_db::assists::AssistKind::Generate),
+ "Generate trait from impl",
+ impl_ast.syntax().text_range(),
+ |builder| {
+ let trait_items = assoc_items.clone_for_update();
+ let impl_items = assoc_items.clone_for_update();
+
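+            // `trait_items` becomes the trait definition (bodies stripped down to signatures),
+            // while `impl_items` keeps the bodies for the trait impl; visibility is removed
+            // from both, since trait items share their trait's visibility (E0449).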
+ trait_items.assoc_items().for_each(|item| {
+ strip_body(&item);
+ remove_items_visibility(&item);
+ });
+
+ impl_items.assoc_items().for_each(|item| {
+ remove_items_visibility(&item);
+ });
+
+ let trait_ast = make::trait_(
+ false,
+ "NewTrait",
+ impl_ast.generic_param_list(),
+ impl_ast.where_clause(),
+ trait_items,
+ );
+
+ // Change `impl Foo` to `impl NewTrait for Foo`
+ let arg_list = if let Some(genpars) = impl_ast.generic_param_list() {
+ genpars.to_generic_args().to_string()
+ } else {
+ "".to_string()
+ };
+
+ if let Some(snippet_cap) = ctx.config.snippet_cap {
+ builder.replace_snippet(
+ snippet_cap,
+ impl_name.syntax().text_range(),
+ format!("${{0:TraitName}}{} for {}", arg_list, impl_name.to_string()),
+ );
+
+ // Insert trait before TraitImpl
+ builder.insert_snippet(
+ snippet_cap,
+ impl_ast.syntax().text_range().start(),
+ format!(
+ "{}\n\n{}",
+ trait_ast.to_string().replace("NewTrait", "${0:TraitName}"),
+ IndentLevel::from_node(impl_ast.syntax())
+ ),
+ );
+ } else {
+ builder.replace(
+ impl_name.syntax().text_range(),
+ format!("NewTrait{} for {}", arg_list, impl_name.to_string()),
+ );
+
+ // Insert trait before TraitImpl
+ builder.insert(
+ impl_ast.syntax().text_range().start(),
+ format!(
+ "{}\n\n{}",
+ trait_ast.to_string(),
+ IndentLevel::from_node(impl_ast.syntax())
+ ),
+ );
+ }
+
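+            // Finally, swap the original item list for the visibility-stripped copy.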
+ builder.replace(assoc_items.syntax().text_range(), impl_items.to_string());
+ },
+ );
+
+ Some(())
+}
+
+/// `E0449` Trait items always share the visibility of their trait
+fn remove_items_visibility(item: &ast::AssocItem) {
+ match item {
+ ast::AssocItem::Const(c) => {
+ if let Some(vis) = c.visibility() {
+ ted::remove(vis.syntax());
+ }
+ }
+ ast::AssocItem::Fn(f) => {
+ if let Some(vis) = f.visibility() {
+ ted::remove(vis.syntax());
+ }
+ }
+ ast::AssocItem::TypeAlias(t) => {
+ if let Some(vis) = t.visibility() {
+ ted::remove(vis.syntax());
+ }
+ }
+ _ => (),
+ }
+}
+
+fn strip_body(item: &ast::AssocItem) {
+ match item {
+ ast::AssocItem::Fn(f) => {
+ if let Some(body) = f.body() {
+                // In contrast to function bodies, we want no whitespace before the semicolon,
+                // so remove it if there is any.
+ if let Some(prev) = body.syntax().prev_sibling_or_token() {
+ if prev.kind() == SyntaxKind::WHITESPACE {
+ ted::remove(prev);
+ }
+ }
+
+ ted::replace(body.syntax(), make::tokens::semicolon());
+ }
+ }
+ _ => (),
+ };
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::{check_assist, check_assist_no_snippet_cap, check_assist_not_applicable};
+
+ #[test]
+ fn test_trigger_when_cursor_on_header() {
+ check_assist_not_applicable(
+ generate_trait_from_impl,
+ r#"
+struct Foo(f64);
+
+impl Foo { $0
+ fn add(&mut self, x: f64) {
+ self.0 += x;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_item_fn() {
+ check_assist_no_snippet_cap(
+ generate_trait_from_impl,
+ r#"
+struct Foo(f64);
+
+impl F$0oo {
+ fn add(&mut self, x: f64) {
+ self.0 += x;
+ }
+}"#,
+ r#"
+struct Foo(f64);
+
+trait NewTrait {
+ fn add(&mut self, x: f64);
+}
+
+impl NewTrait for Foo {
+ fn add(&mut self, x: f64) {
+ self.0 += x;
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_assoc_item_macro() {
+ check_assist_no_snippet_cap(
+ generate_trait_from_impl,
+ r#"
+struct Foo;
+
+macro_rules! const_maker {
+ ($t:ty, $v:tt) => {
+ const CONST: $t = $v;
+ };
+}
+
+impl F$0oo {
+ const_maker! {i32, 7}
+}"#,
+ r#"
+struct Foo;
+
+macro_rules! const_maker {
+ ($t:ty, $v:tt) => {
+ const CONST: $t = $v;
+ };
+}
+
+trait NewTrait {
+ const_maker! {i32, 7}
+}
+
+impl NewTrait for Foo {
+ const_maker! {i32, 7}
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_assoc_item_const() {
+ check_assist_no_snippet_cap(
+ generate_trait_from_impl,
+ r#"
+struct Foo;
+
+impl F$0oo {
+ const ABC: i32 = 3;
+}"#,
+ r#"
+struct Foo;
+
+trait NewTrait {
+ const ABC: i32 = 3;
+}
+
+impl NewTrait for Foo {
+ const ABC: i32 = 3;
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_impl_with_generics() {
+ check_assist_no_snippet_cap(
+ generate_trait_from_impl,
+ r#"
+struct Foo<const N: usize>([i32; N]);
+
+impl<const N: usize> F$0oo<N> {
+ // Used as an associated constant.
+ const CONST: usize = N * 4;
+}
+ "#,
+ r#"
+struct Foo<const N: usize>([i32; N]);
+
+trait NewTrait<const N: usize> {
+ // Used as an associated constant.
+ const CONST: usize = N * 4;
+}
+
+impl<const N: usize> NewTrait<N> for Foo<N> {
+ // Used as an associated constant.
+ const CONST: usize = N * 4;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_trait_items_should_not_have_vis() {
+ check_assist_no_snippet_cap(
+ generate_trait_from_impl,
+ r#"
+struct Foo;
+
+impl F$0oo {
+ pub fn a_func() -> Option<()> {
+ Some(())
+ }
+}"#,
+ r#"
+struct Foo;
+
+trait NewTrait {
+ fn a_func() -> Option<()>;
+}
+
+impl NewTrait for Foo {
+ fn a_func() -> Option<()> {
+ Some(())
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_empty_inherent_impl() {
+ check_assist_not_applicable(
+ generate_trait_from_impl,
+ r#"
+impl Emp$0tyImpl{}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_not_top_level_impl() {
+ check_assist_no_snippet_cap(
+ generate_trait_from_impl,
+ r#"
+mod a {
+ impl S$0 {
+ fn foo() {}
+ }
+}"#,
+ r#"
+mod a {
+ trait NewTrait {
+ fn foo();
+ }
+
+ impl NewTrait for S {
+ fn foo() {}
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_snippet_cap_is_some() {
+ check_assist(
+ generate_trait_from_impl,
+ r#"
+struct Foo<const N: usize>([i32; N]);
+
+impl<const N: usize> F$0oo<N> {
+ // Used as an associated constant.
+ const CONST: usize = N * 4;
+}
+ "#,
+ r#"
+struct Foo<const N: usize>([i32; N]);
+
+trait ${0:TraitName}<const N: usize> {
+ // Used as an associated constant.
+ const CONST: usize = N * 4;
+}
+
+impl<const N: usize> ${0:TraitName}<N> for Foo<N> {
+ // Used as an associated constant.
+ const CONST: usize = N * 4;
+}
+ "#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
index 797180fa1..ffab58509 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
@@ -15,7 +15,7 @@ use ide_db::{
};
use itertools::{izip, Itertools};
use syntax::{
- ast::{self, edit_in_place::Indent, HasArgList, PathExpr},
+ ast::{self, edit::IndentLevel, edit_in_place::Indent, HasArgList, PathExpr},
ted, AstNode, NodeOrToken, SyntaxKind,
};
@@ -80,7 +80,7 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let is_recursive_fn = usages
.clone()
- .in_scope(SearchScope::file_range(FileRange {
+ .in_scope(&SearchScope::file_range(FileRange {
file_id: def_file,
range: func_body.syntax().text_range(),
}))
@@ -306,7 +306,7 @@ fn inline(
params: &[(ast::Pat, Option<ast::Type>, hir::Param)],
CallInfo { node, arguments, generic_arg_list }: &CallInfo,
) -> ast::Expr {
- let body = if sema.hir_file_for(fn_body.syntax()).is_macro() {
+ let mut body = if sema.hir_file_for(fn_body.syntax()).is_macro() {
cov_mark::hit!(inline_call_defined_in_macro);
if let Some(body) = ast::BlockExpr::cast(insert_ws_into(fn_body.syntax().clone())) {
body
@@ -391,19 +391,19 @@ fn inline(
}
}
+ let mut let_stmts = Vec::new();
+
// Inline parameter expressions or generate `let` statements depending on whether inlining works or not.
- for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments).rev() {
+ for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments) {
// izip confuses RA due to our lack of hygiene info currently losing us type info causing incorrect errors
let usages: &[ast::PathExpr] = &usages;
let expr: &ast::Expr = expr;
- let insert_let_stmt = || {
+ let mut insert_let_stmt = || {
let ty = sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty.clone());
- if let Some(stmt_list) = body.stmt_list() {
- stmt_list.push_front(
- make::let_stmt(pat.clone(), ty, Some(expr.clone())).clone_for_update().into(),
- )
- }
+ let_stmts.push(
+ make::let_stmt(pat.clone(), ty, Some(expr.clone())).clone_for_update().into(),
+ );
};
// check if there is a local var in the function that conflicts with parameter
@@ -457,6 +457,24 @@ fn inline(
}
}
+ let is_async_fn = function.is_async(sema.db);
+ if is_async_fn {
+ cov_mark::hit!(inline_call_async_fn);
+ body = make::async_move_block_expr(body.statements(), body.tail_expr()).clone_for_update();
+
+ // Arguments should be evaluated outside the async block, and then moved into it.
+ if !let_stmts.is_empty() {
+ cov_mark::hit!(inline_call_async_fn_with_let_stmts);
+ body.indent(IndentLevel(1));
+ body = make::block_expr(let_stmts, Some(body.into())).clone_for_update();
+ }
+ } else if let Some(stmt_list) = body.stmt_list() {
+ ted::insert_all(
+ ted::Position::after(stmt_list.l_curly_token().unwrap()),
+ let_stmts.into_iter().map(|stmt| stmt.syntax().clone().into()).collect(),
+ );
+ }
+
let original_indentation = match node {
ast::CallableExpr::Call(it) => it.indent_level(),
ast::CallableExpr::MethodCall(it) => it.indent_level(),
@@ -464,7 +482,7 @@ fn inline(
body.reindent_to(original_indentation);
match body.tail_expr() {
- Some(expr) if body.statements().next().is_none() => expr,
+ Some(expr) if !is_async_fn && body.statements().next().is_none() => expr,
_ => match node
.syntax()
.parent()
@@ -1353,4 +1371,107 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn async_fn_single_expression() {
+ cov_mark::check!(inline_call_async_fn);
+ check_assist(
+ inline_call,
+ r#"
+async fn bar(x: u32) -> u32 { x + 1 }
+async fn foo(arg: u32) -> u32 {
+ bar(arg).await * 2
+}
+fn spawn<T>(_: T) {}
+fn main() {
+ spawn(foo$0(42));
+}
+"#,
+ r#"
+async fn bar(x: u32) -> u32 { x + 1 }
+async fn foo(arg: u32) -> u32 {
+ bar(arg).await * 2
+}
+fn spawn<T>(_: T) {}
+fn main() {
+ spawn(async move {
+ bar(42).await * 2
+ });
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn async_fn_multiple_statements() {
+ cov_mark::check!(inline_call_async_fn);
+ check_assist(
+ inline_call,
+ r#"
+async fn bar(x: u32) -> u32 { x + 1 }
+async fn foo(arg: u32) -> u32 {
+ bar(arg).await;
+ 42
+}
+fn spawn<T>(_: T) {}
+fn main() {
+ spawn(foo$0(42));
+}
+"#,
+ r#"
+async fn bar(x: u32) -> u32 { x + 1 }
+async fn foo(arg: u32) -> u32 {
+ bar(arg).await;
+ 42
+}
+fn spawn<T>(_: T) {}
+fn main() {
+ spawn(async move {
+ bar(42).await;
+ 42
+ });
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn async_fn_with_let_statements() {
+ cov_mark::check!(inline_call_async_fn);
+ cov_mark::check!(inline_call_async_fn_with_let_stmts);
+ check_assist(
+ inline_call,
+ r#"
+async fn bar(x: u32) -> u32 { x + 1 }
+async fn foo(x: u32, y: u32, z: &u32) -> u32 {
+ bar(x).await;
+ y + y + *z
+}
+fn spawn<T>(_: T) {}
+fn main() {
+ let var = 42;
+ spawn(foo$0(var, var + 1, &var));
+}
+"#,
+ r#"
+async fn bar(x: u32) -> u32 { x + 1 }
+async fn foo(x: u32, y: u32, z: &u32) -> u32 {
+ bar(x).await;
+ y + y + *z
+}
+fn spawn<T>(_: T) {}
+fn main() {
+ let var = 42;
+ spawn({
+ let y = var + 1;
+ let z: &u32 = &var;
+ async move {
+ bar(var).await;
+ y + y + *z
+ }
+ });
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs
index 5aa8e56f5..5d956b1a5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs
@@ -37,11 +37,10 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let unexpanded = ctx.find_node_at_offset::<ast::MacroCall>()?;
let expanded = insert_ws_into(ctx.sema.expand(&unexpanded)?.clone_for_update());
-
let text_range = unexpanded.syntax().text_range();
acc.add(
- AssistId("inline_macro", AssistKind::RefactorRewrite),
+ AssistId("inline_macro", AssistKind::RefactorInline),
format!("Inline macro"),
text_range,
|builder| builder.replace(text_range, expanded.to_string()),
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs
index b6027eac5..22d536b5a 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs
@@ -54,7 +54,11 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
// NOTE: We can technically provide this assist for default methods in trait definitions, but
// it's somewhat complex to handle it correctly when the const's name conflicts with
// supertrait's item. We may want to consider implementing it in the future.
- let AssocItemContainer::Impl(impl_) = ctx.sema.to_def(&parent_fn)?.as_assoc_item(db)?.container(db) else { return None; };
+ let AssocItemContainer::Impl(impl_) =
+ ctx.sema.to_def(&parent_fn)?.as_assoc_item(db)?.container(db)
+ else {
+ return None;
+ };
if impl_.trait_(db).is_some() {
return None;
}
@@ -78,17 +82,19 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
return None;
}
- let usages =
- Definition::Const(def).usages(&ctx.sema).in_scope(SearchScope::file_range(FileRange {
- file_id: ctx.file_id(),
- range: parent_fn.syntax().text_range(),
- }));
-
acc.add(
AssistId("move_const_to_impl", crate::AssistKind::RefactorRewrite),
"Move const to impl block",
const_.syntax().text_range(),
|builder| {
+ let usages = Definition::Const(def)
+ .usages(&ctx.sema)
+ .in_scope(&SearchScope::file_range(FileRange {
+ file_id: ctx.file_id(),
+ range: parent_fn.syntax().text_range(),
+ }))
+ .all();
+
let range_to_delete = match const_.syntax().next_sibling_or_token() {
Some(s) if matches!(s.kind(), SyntaxKind::WHITESPACE) => {
// Remove following whitespaces too.
@@ -99,7 +105,7 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
builder.delete(range_to_delete);
let const_ref = format!("Self::{}", name.display(ctx.db()));
- for range in usages.all().file_ranges().map(|it| it.range) {
+ for range in usages.file_ranges().map(|it| it.range) {
builder.replace(range, const_ref.clone());
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs
index 23153b4c5..5cc110cf1 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs
@@ -8,13 +8,10 @@ use ide_db::{
use stdx::to_upper_snake_case;
use syntax::{
ast::{self, make, HasName},
- AstNode, WalkEvent,
+ ted, AstNode, WalkEvent,
};
-use crate::{
- assist_context::{AssistContext, Assists},
- utils::{render_snippet, Cursor},
-};
+use crate::assist_context::{AssistContext, Assists};
// Assist: promote_local_to_const
//
@@ -70,29 +67,33 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
cov_mark::hit!(promote_local_non_const);
return None;
}
- let target = let_stmt.syntax().text_range();
+
acc.add(
AssistId("promote_local_to_const", AssistKind::Refactor),
"Promote local to constant",
- target,
- |builder| {
+ let_stmt.syntax().text_range(),
+ |edit| {
let name = to_upper_snake_case(&name.to_string());
let usages = Definition::Local(local).usages(&ctx.sema).all();
if let Some(usages) = usages.references.get(&ctx.file_id()) {
+ let name = make::name_ref(&name);
+
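+                // Replace each usage of the local with a reference to the new constant's name.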
for usage in usages {
- builder.replace(usage.range, &name);
+ let Some(usage) = usage.name.as_name_ref().cloned() else { continue };
+ let usage = edit.make_mut(usage);
+ ted::replace(usage.syntax(), name.clone_for_update().syntax());
}
}
- let item = make::item_const(None, make::name(&name), make::ty(&ty), initializer);
- match ctx.config.snippet_cap.zip(item.name()) {
- Some((cap, name)) => builder.replace_snippet(
- cap,
- target,
- render_snippet(cap, item.syntax(), Cursor::Before(name.syntax())),
- ),
- None => builder.replace(target, item.to_string()),
+ let item = make::item_const(None, make::name(&name), make::ty(&ty), initializer)
+ .clone_for_update();
+ let let_stmt = edit.make_mut(let_stmt);
+
+ if let Some((cap, name)) = ctx.config.snippet_cap.zip(item.name()) {
+ edit.add_tabstop_before(cap, name);
}
+
+ ted::replace(let_stmt.syntax(), item.syntax());
},
)
}
@@ -158,6 +159,27 @@ fn foo() {
}
#[test]
+ fn multiple_uses() {
+ check_assist(
+ promote_local_to_const,
+ r"
+fn foo() {
+ let x$0 = 0;
+ let y = x;
+ let z = (x, x, x, x);
+}
+",
+ r"
+fn foo() {
+ const $0X: i32 = 0;
+ let y = X;
+ let z = (X, X, X, X);
+}
+",
+ );
+ }
+
+ #[test]
fn not_applicable_non_const_meth_call() {
cov_mark::check!(promote_local_non_const);
check_assist_not_applicable(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs
index a5c7fea40..f222b3eb9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs
@@ -102,7 +102,7 @@ struct AssignmentsCollector<'a> {
assignments: Vec<(ast::BinExpr, ast::Expr)>,
}
-impl<'a> AssignmentsCollector<'a> {
+impl AssignmentsCollector<'_> {
fn collect_match(&mut self, match_expr: &ast::MatchExpr) -> Option<()> {
for arm in match_expr.match_arm_list()?.arms() {
match arm.expr()? {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs
new file mode 100644
index 000000000..dd4839351
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs
@@ -0,0 +1,739 @@
+use std::collections::{hash_map::Entry, HashMap};
+
+use hir::{InFile, Module, ModuleSource};
+use ide_db::{
+ base_db::FileRange,
+ defs::Definition,
+ search::{FileReference, ReferenceCategory, SearchScope},
+ RootDatabase,
+};
+use syntax::{ast, AstNode};
+use text_edit::TextRange;
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: remove_unused_imports
+//
+// Removes any use statements in the current selection that are unused.
+//
+// ```
+// struct X();
+// mod foo {
+// use super::X$0;
+// }
+// ```
+// ->
+// ```
+// struct X();
+// mod foo {
+// }
+// ```
+pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ // First, grab the uses that intersect with the current selection.
+ let selected_el = match ctx.covering_element() {
+ syntax::NodeOrToken::Node(n) => n,
+ syntax::NodeOrToken::Token(t) => t.parent()?,
+ };
+
+ // This applies to all uses that are selected, or are ancestors of our selection.
+ let uses_up = selected_el.ancestors().skip(1).filter_map(ast::Use::cast);
+ let uses_down = selected_el
+ .descendants()
+ .filter(|x| x.text_range().intersect(ctx.selection_trimmed()).is_some())
+ .filter_map(ast::Use::cast);
+ let uses = uses_up.chain(uses_down).collect::<Vec<_>>();
+
+    // Maps each use's containing module to the search scopes we should look through to find usages.
+ let mut search_scopes = HashMap::<Module, Vec<SearchScope>>::new();
+
+ // iterator over all unused use trees
+ let mut unused = uses
+ .into_iter()
+ .flat_map(|u| u.syntax().descendants().filter_map(ast::UseTree::cast))
+ .filter(|u| u.use_tree_list().is_none())
+ .filter_map(|u| {
+            // Find any use trees that are unused
+
+ let use_module = ctx.sema.scope(&u.syntax()).map(|s| s.module())?;
+ let scope = match search_scopes.entry(use_module) {
+ Entry::Occupied(o) => o.into_mut(),
+ Entry::Vacant(v) => v.insert(module_search_scope(ctx.db(), use_module)),
+ };
+
+ // Gets the path associated with this use tree. If there isn't one, then ignore this use tree.
+ let path = if let Some(path) = u.path() {
+ path
+ } else if u.star_token().is_some() {
+                // This is the case where the `*` token appears inside braces (e.g. `use foo::{*}`).
+                // Here the parent use tree's path is the one to resolve the glob against.
+ match u.syntax().ancestors().skip(1).find_map(ast::UseTree::cast) {
+ Some(parent_u) if parent_u.path().is_some() => parent_u.path().unwrap(),
+ _ => return None,
+ }
+ } else {
+ return None;
+ };
+
+ // Get the actual definition associated with this use item.
+ let res = match ctx.sema.resolve_path(&path) {
+ Some(x) => x,
+ None => {
+ return None;
+ }
+ };
+
+ let def = match res {
+ hir::PathResolution::Def(d) => Definition::from(d),
+ _ => return None,
+ };
+
+ if u.star_token().is_some() {
+ // Check if any of the children of this module are used
+ let def_mod = match def {
+ Definition::Module(module) => module,
+ _ => return None,
+ };
+
+ if !def_mod
+ .scope(ctx.db(), Some(use_module))
+ .iter()
+ .filter_map(|(_, x)| match x {
+ hir::ScopeDef::ModuleDef(d) => Some(Definition::from(*d)),
+ _ => None,
+ })
+ .any(|d| used_once_in_scope(ctx, d, scope))
+ {
+ return Some(u);
+ }
+ } else if let Definition::Trait(ref t) = def {
+ // If the trait or any item is used.
+ if !std::iter::once(def)
+ .chain(t.items(ctx.db()).into_iter().map(Definition::from))
+ .any(|d| used_once_in_scope(ctx, d, scope))
+ {
+ return Some(u);
+ }
+ } else {
+ if !used_once_in_scope(ctx, def, &scope) {
+ return Some(u);
+ }
+ }
+
+ None
+ })
+ .peekable();
+
+    // Peek so we can bail out early when nothing is unused; the actual removal work only happens if the user applies the assist.
+ if unused.peek().is_some() {
+ acc.add(
+ AssistId("remove_unused_imports", AssistKind::QuickFix),
+ "Remove all the unused imports",
+ selected_el.text_range(),
+ |builder| {
+ let unused: Vec<ast::UseTree> = unused.map(|x| builder.make_mut(x)).collect();
+ for node in unused {
+ node.remove_recursive();
+ }
+ },
+ )
+ } else {
+ None
+ }
+}
+
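+/// Returns `true` if `def` has at least one non-import usage in any of the given scopes.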
+fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec<SearchScope>) -> bool {
+ let mut found = false;
+
+ for scope in scopes {
+ let mut search_non_import = |_, r: FileReference| {
+        // The import itself counts as a use of the definition, so skip import references.
+ if r.category != Some(ReferenceCategory::Import) {
+ found = true;
+ true
+ } else {
+ false
+ }
+ };
+ def.usages(&ctx.sema).in_scope(scope).search(&mut search_non_import);
+ if found {
+ break;
+ }
+ }
+
+ found
+}
+
+/// Build a search scope spanning the given module but none of its submodules.
+fn module_search_scope(db: &RootDatabase, module: hir::Module) -> Vec<SearchScope> {
+ let (file_id, range) = {
+ let InFile { file_id, value } = module.definition_source(db);
+ if let Some((file_id, call_source)) = file_id.original_call_node(db) {
+ (file_id, Some(call_source.text_range()))
+ } else {
+ (
+ file_id.original_file(db),
+ match value {
+ ModuleSource::SourceFile(_) => None,
+ ModuleSource::Module(it) => Some(it.syntax().text_range()),
+ ModuleSource::BlockExpr(it) => Some(it.syntax().text_range()),
+ },
+ )
+ }
+ };
+
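+    // Splits `first` around its intersection with `second`: the part before the
+    // intersection and, if anything remains, the part after it. If the ranges do
+    // not intersect, `first` is returned unchanged.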
+ fn split_at_subrange(first: TextRange, second: TextRange) -> (TextRange, Option<TextRange>) {
+ let intersect = first.intersect(second);
+ if let Some(intersect) = intersect {
+ let start_range = TextRange::new(first.start(), intersect.start());
+
+ if intersect.end() < first.end() {
+ (start_range, Some(TextRange::new(intersect.end(), first.end())))
+ } else {
+ (start_range, None)
+ }
+ } else {
+ (first, None)
+ }
+ }
+
+ let mut scopes = Vec::new();
+ if let Some(range) = range {
+ let mut ranges = vec![range];
+
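+        // Carve each child module's text range out of the parent's ranges so that
+        // submodules are excluded from the resulting search scope.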
+ for child in module.children(db) {
+ let rng = match child.definition_source(db).value {
+ ModuleSource::SourceFile(_) => continue,
+ ModuleSource::Module(it) => it.syntax().text_range(),
+ ModuleSource::BlockExpr(_) => continue,
+ };
+ let mut new_ranges = Vec::new();
+ for old_range in ranges.iter_mut() {
+ let split = split_at_subrange(old_range.clone(), rng);
+ *old_range = split.0;
+ new_ranges.extend(split.1);
+ }
+
+ ranges.append(&mut new_ranges);
+ }
+
+ for range in ranges {
+ scopes.push(SearchScope::file_range(FileRange { file_id, range }));
+ }
+ } else {
+ scopes.push(SearchScope::single_file(file_id));
+ }
+
+ scopes
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn remove_unused() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+ $0use super::X;
+ use super::Y;$0
+}
+"#,
+ r#"
+struct X();
+struct Y();
+mod z {
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_is_precise() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+mod z {
+$0use super::X;$0
+
+fn w() {
+ struct X();
+ let x = X();
+}
+}
+"#,
+ r#"
+struct X();
+mod z {
+
+fn w() {
+ struct X();
+ let x = X();
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_name_use_is_use() {
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+struct X();
+trait Y {
+ fn f();
+}
+
+impl Y for X {
+ fn f() {}
+}
+mod z {
+$0use super::X;
+use super::Y;$0
+
+fn w() {
+ X::f();
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_item_use_is_use() {
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+struct X();
+trait Y {
+ fn f(self);
+}
+
+impl Y for X {
+ fn f(self) {}
+}
+mod z {
+$0use super::X;
+use super::Y;$0
+
+fn w() {
+ let x = X();
+ x.f();
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+    fn renamed_trait_item_use_is_use() {
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+struct X();
+trait Y {
+ fn f(self);
+}
+
+impl Y for X {
+ fn f(self) {}
+}
+mod z {
+$0use super::X;
+use super::Y as Z;$0
+
+fn w() {
+ let x = X();
+ x.f();
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+    fn renamed_underscore_trait_item_use_is_use() {
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+struct X();
+trait Y {
+ fn f(self);
+}
+
+impl Y for X {
+ fn f(self) {}
+}
+mod z {
+$0use super::X;
+use super::Y as _;$0
+
+fn w() {
+ let x = X();
+ x.f();
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn dont_remove_used() {
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+$0use super::X;
+use super::Y;$0
+
+fn w() {
+ let x = X();
+ let y = Y();
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_in_braces() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+ $0use super::{X, Y};$0
+
+ fn w() {
+ let x = X();
+ }
+}
+"#,
+ r#"
+struct X();
+struct Y();
+mod z {
+ use super::{X};
+
+ fn w() {
+ let x = X();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_under_cursor() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+mod z {
+ use super::X$0;
+}
+"#,
+ r#"
+struct X();
+mod z {
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_multi_use_block() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+$0mod y {
+ use super::X;
+}
+mod z {
+ use super::X;
+}$0
+"#,
+ r#"
+struct X();
+mod y {
+}
+mod z {
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_nested() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+mod y {
+ struct Y();
+ mod z {
+ use crate::{X, y::Y}$0;
+ fn f() {
+ let x = X();
+ }
+ }
+}
+"#,
+ r#"
+struct X();
+mod y {
+ struct Y();
+ mod z {
+ use crate::{X};
+ fn f() {
+ let x = X();
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_nested_first_item() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+mod y {
+ struct Y();
+ mod z {
+ use crate::{X, y::Y}$0;
+ fn f() {
+ let y = Y();
+ }
+ }
+}
+"#,
+ r#"
+struct X();
+mod y {
+ struct Y();
+ mod z {
+ use crate::{y::Y};
+ fn f() {
+ let y = Y();
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_nested_all_unused() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+mod y {
+ struct Y();
+ mod z {
+ use crate::{X, y::Y}$0;
+ }
+}
+"#,
+ r#"
+struct X();
+mod y {
+ struct Y();
+ mod z {
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_glob() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+ use super::*$0;
+}
+"#,
+ r#"
+struct X();
+struct Y();
+mod z {
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_braced_glob() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+ use super::{*}$0;
+}
+"#,
+ r#"
+struct X();
+struct Y();
+mod z {
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn dont_remove_used_glob() {
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+ use super::*$0;
+
+ fn f() {
+ let x = X();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn only_remove_from_selection() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+ $0use super::X;$0
+ use super::Y;
+}
+mod w {
+ use super::Y;
+}
+"#,
+ r#"
+struct X();
+struct Y();
+mod z {
+ use super::Y;
+}
+mod w {
+ use super::Y;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_several_files() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+//- /foo.rs
+pub struct X();
+pub struct Y();
+
+//- /main.rs
+$0use foo::X;
+use foo::Y;
+$0
+mod foo;
+mod z {
+ use crate::foo::X;
+}
+"#,
+ r#"
+
+mod foo;
+mod z {
+ use crate::foo::X;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn use_in_submodule_doesnt_count() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+mod z {
+ use super::X$0;
+
+ mod w {
+ use crate::X;
+
+ fn f() {
+ let x = X();
+ }
+ }
+}
+"#,
+ r#"
+struct X();
+mod z {
+
+ mod w {
+ use crate::X;
+
+ fn f() {
+ let x = X();
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn use_in_submodule_file_doesnt_count() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+//- /z/foo.rs
+use crate::X;
+fn f() {
+ let x = X();
+}
+
+//- /main.rs
+pub struct X();
+
+mod z {
+ use crate::X$0;
+ mod foo;
+}
+"#,
+ r#"
+pub struct X();
+
+mod z {
+ mod foo;
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index 3bdd795be..ac45581b7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -10,7 +10,7 @@ use crate::{
assist_context::{AssistContext, Assists, SourceChangeBuilder},
utils::{
add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body,
- generate_trait_impl_text, render_snippet, Cursor, DefaultMethods,
+ generate_trait_impl_text, render_snippet, Cursor, DefaultMethods, IgnoreAssocItems,
},
AssistId, AssistKind,
};
@@ -73,7 +73,7 @@ pub(crate) fn replace_derive_with_manual_impl(
&ctx.sema,
current_crate,
NameToImport::exact_case_sensitive(path.segments().last()?.to_string()),
- items_locator::AssocItemSearch::Exclude,
+ items_locator::AssocSearchMode::Exclude,
Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
)
.filter_map(|item| match item.as_module_def()? {
@@ -172,7 +172,17 @@ fn impl_def_from_trait(
) -> Option<(ast::Impl, ast::AssocItem)> {
let trait_ = trait_?;
let target_scope = sema.scope(annotated_name.syntax())?;
- let trait_items = filter_assoc_items(sema, &trait_.items(sema.db), DefaultMethods::No);
+
+ // Keep assoc items of local crates even if they have #[doc(hidden)] attr.
+ let ignore_items = if trait_.module(sema.db).krate().origin(sema.db).is_local() {
+ IgnoreAssocItems::No
+ } else {
+ IgnoreAssocItems::DocHiddenAttrPresent
+ };
+
+ let trait_items =
+ filter_assoc_items(sema, &trait_.items(sema.db), DefaultMethods::No, ignore_items);
+
if trait_items.is_empty() {
return None;
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
index e7b62d49b..c7c0be4c7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
@@ -157,7 +157,7 @@ fn find_usages(
file_id: FileId,
) -> UsageSearchResult {
let file_range = FileRange { file_id, range: fn_.syntax().text_range() };
- type_param_def.usages(sema).in_scope(SearchScope::file_range(file_range)).all()
+ type_param_def.usages(sema).in_scope(&SearchScope::file_range(file_range)).all()
}
fn check_valid_usages(usages: &UsageSearchResult, param_list_range: TextRange) -> bool {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
index 26f3c1926..f235b554e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
@@ -38,14 +38,18 @@ pub(crate) fn unwrap_result_return_type(acc: &mut Assists, ctx: &AssistContext<'
};
let type_ref = &ret_type.ty()?;
- let Some(hir::Adt::Enum(ret_enum)) = ctx.sema.resolve_type(type_ref)?.as_adt() else { return None; };
+ let Some(hir::Adt::Enum(ret_enum)) = ctx.sema.resolve_type(type_ref)?.as_adt() else {
+ return None;
+ };
let result_enum =
FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax())?.krate()).core_result_Result()?;
if ret_enum != result_enum {
return None;
}
- let Some(ok_type) = unwrap_result_type(type_ref) else { return None; };
+ let Some(ok_type) = unwrap_result_type(type_ref) else {
+ return None;
+ };
acc.add(
AssistId("unwrap_result_return_type", AssistKind::RefactorRewrite),
@@ -130,12 +134,16 @@ fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
// Tries to extract `T` from `Result<T, E>`.
fn unwrap_result_type(ty: &ast::Type) -> Option<ast::Type> {
- let ast::Type::PathType(path_ty) = ty else { return None; };
+ let ast::Type::PathType(path_ty) = ty else {
+ return None;
+ };
let path = path_ty.path()?;
let segment = path.first_segment()?;
let generic_arg_list = segment.generic_arg_list()?;
let generic_args: Vec<_> = generic_arg_list.generic_args().collect();
- let ast::GenericArg::TypeArg(ok_type) = generic_args.first()? else { return None; };
+ let ast::GenericArg::TypeArg(ok_type) = generic_args.first()? else {
+ return None;
+ };
ok_type.ty()
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
index b6c489eb6..24c338745 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
@@ -6,7 +6,7 @@ use ide_db::{
};
use syntax::{
ast::{self, make, Expr},
- match_ast, AstNode,
+ match_ast, ted, AstNode,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -52,8 +52,8 @@ pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext<
AssistId("wrap_return_type_in_result", AssistKind::RefactorRewrite),
"Wrap return type in Result",
type_ref.syntax().text_range(),
- |builder| {
- let body = ast::Expr::BlockExpr(body);
+ |edit| {
+ let body = edit.make_mut(ast::Expr::BlockExpr(body));
let mut exprs_to_wrap = Vec::new();
let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e);
@@ -70,17 +70,24 @@ pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext<
let ok_wrapped = make::expr_call(
make::expr_path(make::ext::ident_path("Ok")),
make::arg_list(iter::once(ret_expr_arg.clone())),
- );
- builder.replace_ast(ret_expr_arg, ok_wrapped);
+ )
+ .clone_for_update();
+ ted::replace(ret_expr_arg.syntax(), ok_wrapped.syntax());
}
- match ctx.config.snippet_cap {
- Some(cap) => {
- let snippet = format!("Result<{type_ref}, ${{0:_}}>");
- builder.replace_snippet(cap, type_ref.syntax().text_range(), snippet)
- }
- None => builder
- .replace(type_ref.syntax().text_range(), format!("Result<{type_ref}, _>")),
+ let new_result_ty =
+ make::ext::ty_result(type_ref.clone(), make::ty_placeholder()).clone_for_update();
+ let old_result_ty = edit.make_mut(type_ref.clone());
+
+ ted::replace(old_result_ty.syntax(), new_result_ty.syntax());
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ let generic_args = new_result_ty
+ .syntax()
+ .descendants()
+ .find_map(ast::GenericArgList::cast)
+ .unwrap();
+ edit.add_placeholder_snippet(cap, generic_args.generic_args().last().unwrap());
}
},
)
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
index 111753bf3..2ebb5ef9b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
@@ -145,6 +145,7 @@ mod handlers {
mod generate_constant;
mod generate_default_from_enum_variant;
mod generate_default_from_new;
+ mod generate_delegate_trait;
mod generate_deref;
mod generate_derive;
mod generate_documentation_template;
@@ -153,12 +154,12 @@ mod handlers {
mod generate_enum_variant;
mod generate_from_impl_for_enum;
mod generate_function;
- mod generate_getter;
+ mod generate_getter_or_setter;
mod generate_impl;
mod generate_is_empty_from_len;
mod generate_new;
- mod generate_setter;
mod generate_delegate_methods;
+ mod generate_trait_from_impl;
mod add_return_type;
mod inline_call;
mod inline_const_as_literal;
@@ -183,6 +184,7 @@ mod handlers {
mod raw_string;
mod remove_dbg;
mod remove_mut;
+ mod remove_unused_imports;
mod remove_unused_param;
mod remove_parentheses;
mod reorder_fields;
@@ -251,6 +253,7 @@ mod handlers {
generate_constant::generate_constant,
generate_default_from_enum_variant::generate_default_from_enum_variant,
generate_default_from_new::generate_default_from_new,
+ generate_delegate_trait::generate_delegate_trait,
generate_derive::generate_derive,
generate_documentation_template::generate_documentation_template,
generate_documentation_template::generate_doc_example,
@@ -264,6 +267,7 @@ mod handlers {
generate_impl::generate_trait_impl,
generate_is_empty_from_len::generate_is_empty_from_len,
generate_new::generate_new,
+ generate_trait_from_impl::generate_trait_from_impl,
inline_call::inline_call,
inline_call::inline_into_callers,
inline_const_as_literal::inline_const_as_literal,
@@ -291,6 +295,7 @@ mod handlers {
raw_string::make_usual_string,
raw_string::remove_hash,
remove_mut::remove_mut,
+ remove_unused_imports::remove_unused_imports,
remove_unused_param::remove_unused_param,
remove_parentheses::remove_parentheses,
reorder_fields::reorder_fields,
@@ -334,9 +339,9 @@ mod handlers {
extract_function::extract_function,
extract_module::extract_module,
//
- generate_getter::generate_getter,
- generate_getter::generate_getter_mut,
- generate_setter::generate_setter,
+ generate_getter_or_setter::generate_getter,
+ generate_getter_or_setter::generate_getter_mut,
+ generate_getter_or_setter::generate_setter,
generate_delegate_methods::generate_delegate_methods,
generate_deref::generate_deref,
//
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
index 344f2bfcc..cc3e251a8 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
@@ -132,8 +132,13 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) {
.filter(|it| !it.source_file_edits.is_empty() || !it.file_system_edits.is_empty())
.expect("Assist did not contain any source changes");
let mut actual = before;
- if let Some(source_file_edit) = source_change.get_source_edit(file_id) {
+ if let Some((source_file_edit, snippet_edit)) =
+ source_change.get_source_and_snippet_edit(file_id)
+ {
source_file_edit.apply(&mut actual);
+ if let Some(snippet_edit) = snippet_edit {
+ snippet_edit.apply(&mut actual);
+ }
}
actual
};
@@ -191,9 +196,12 @@ fn check_with_config(
&& source_change.file_system_edits.len() == 0;
let mut buf = String::new();
- for (file_id, edit) in source_change.source_file_edits {
+ for (file_id, (edit, snippet_edit)) in source_change.source_file_edits {
let mut text = db.file_text(file_id).as_ref().to_owned();
edit.apply(&mut text);
+ if let Some(snippet_edit) = snippet_edit {
+ snippet_edit.apply(&mut text);
+ }
if !skip_header {
let sr = db.file_source_root(file_id);
let sr = db.source_root(sr);
@@ -485,18 +493,21 @@ pub fn test_some_range(a: int) -> bool {
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "let $0var_name = 5;\n ",
- delete: 45..45,
- },
- Indel {
- insert: "var_name",
- delete: 59..60,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "let $0var_name = 5;\n ",
+ delete: 45..45,
+ },
+ Indel {
+ insert: "var_name",
+ delete: 59..60,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [],
is_snippet: true,
@@ -544,18 +555,21 @@ pub fn test_some_range(a: int) -> bool {
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "let $0var_name = 5;\n ",
- delete: 45..45,
- },
- Indel {
- insert: "var_name",
- delete: 59..60,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "let $0var_name = 5;\n ",
+ delete: 45..45,
+ },
+ Indel {
+ insert: "var_name",
+ delete: 59..60,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [],
is_snippet: true,
@@ -581,18 +595,21 @@ pub fn test_some_range(a: int) -> bool {
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "fun_name()",
- delete: 59..60,
- },
- Indel {
- insert: "\n\nfn $0fun_name() -> i32 {\n 5\n}",
- delete: 110..110,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "fun_name()",
+ delete: 59..60,
+ },
+ Indel {
+ insert: "\n\nfn $0fun_name() -> i32 {\n 5\n}",
+ delete: 110..110,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [],
is_snippet: true,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
index c097e0739..6eadc3dbc 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
@@ -952,6 +952,7 @@ fn doctest_generate_default_from_new() {
check_doc_test(
"generate_default_from_new",
r#####"
+//- minicore: default
struct Example { _inner: () }
impl Example {
@@ -1016,6 +1017,69 @@ impl Person {
}
#[test]
+fn doctest_generate_delegate_trait() {
+ check_doc_test(
+ "generate_delegate_trait",
+ r#####"
+trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+}
+struct A;
+impl SomeTrait for A {
+ type T = u32;
+
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+
+ fn method_(&mut self) -> bool {
+ false
+ }
+}
+struct B {
+ a$0: A,
+}
+"#####,
+ r#####"
+trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+}
+struct A;
+impl SomeTrait for A {
+ type T = u32;
+
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+
+ fn method_(&mut self) -> bool {
+ false
+ }
+}
+struct B {
+ a: A,
+}
+
+impl SomeTrait for B {
+ type T = <A as SomeTrait>::T;
+
+ fn fn_(arg: u32) -> u32 {
+ <A as SomeTrait>::fn_(arg)
+ }
+
+ fn method_(&mut self) -> bool {
+ <A as SomeTrait>::method_( &mut self.a )
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_generate_deref() {
check_doc_test(
"generate_deref",
@@ -1429,7 +1493,7 @@ struct Person {
}
impl Person {
- fn set_name(&mut self, name: String) {
+ fn $0set_name(&mut self, name: String) {
self.name = name;
}
}
@@ -1438,6 +1502,62 @@ impl Person {
}
#[test]
+fn doctest_generate_trait_from_impl() {
+ check_doc_test(
+ "generate_trait_from_impl",
+ r#####"
+struct Foo<const N: usize>([i32; N]);
+
+macro_rules! const_maker {
+ ($t:ty, $v:tt) => {
+ const CONST: $t = $v;
+ };
+}
+
+impl<const N: usize> Fo$0o<N> {
+ // Used as an associated constant.
+ const CONST_ASSOC: usize = N * 4;
+
+ fn create() -> Option<()> {
+ Some(())
+ }
+
+ const_maker! {i32, 7}
+}
+"#####,
+ r#####"
+struct Foo<const N: usize>([i32; N]);
+
+macro_rules! const_maker {
+ ($t:ty, $v:tt) => {
+ const CONST: $t = $v;
+ };
+}
+
+trait ${0:TraitName}<const N: usize> {
+ // Used as an associated constant.
+ const CONST_ASSOC: usize = N * 4;
+
+ fn create() -> Option<()>;
+
+ const_maker! {i32, 7}
+}
+
+impl<const N: usize> ${0:TraitName}<N> for Foo<N> {
+ // Used as an associated constant.
+ const CONST_ASSOC: usize = N * 4;
+
+ fn create() -> Option<()> {
+ Some(())
+ }
+
+ const_maker! {i32, 7}
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_generate_trait_impl() {
check_doc_test(
"generate_trait_impl",
@@ -2115,6 +2235,24 @@ fn main() {
}
#[test]
+fn doctest_remove_unused_imports() {
+ check_doc_test(
+ "remove_unused_imports",
+ r#####"
+struct X();
+mod foo {
+ use super::X$0;
+}
+"#####,
+ r#####"
+struct X();
+mod foo {
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_remove_unused_param() {
check_doc_test(
"remove_unused_param",
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
index 03d855350..a262570d9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
@@ -3,7 +3,7 @@
use std::ops;
pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
-use hir::{db::HirDatabase, HirDisplay, InFile, Semantics};
+use hir::{db::HirDatabase, HasAttrs as HirHasAttrs, HirDisplay, InFile, Semantics};
use ide_db::{
famous_defs::FamousDefs, path_transform::PathTransform,
syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, SnippetCap,
@@ -84,6 +84,12 @@ pub fn test_related_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
})
}
+#[derive(Clone, Copy, PartialEq)]
+pub enum IgnoreAssocItems {
+ DocHiddenAttrPresent,
+ No,
+}
+
#[derive(Copy, Clone, PartialEq)]
pub enum DefaultMethods {
Only,
@@ -94,11 +100,16 @@ pub fn filter_assoc_items(
sema: &Semantics<'_, RootDatabase>,
items: &[hir::AssocItem],
default_methods: DefaultMethods,
+ ignore_items: IgnoreAssocItems,
) -> Vec<InFile<ast::AssocItem>> {
return items
.iter()
- // Note: This throws away items with no source.
.copied()
+ .filter(|assoc_item| {
+ !(ignore_items == IgnoreAssocItems::DocHiddenAttrPresent
+ && assoc_item.attrs(sema.db).has_doc_hidden())
+ })
+ // Note: This throws away items with no source.
.filter_map(|assoc_item| {
let item = match assoc_item {
hir::AssocItem::Function(it) => sema.source(it)?.map(ast::AssocItem::Fn),
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs
index 19bfd294b..62bdb6ee6 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs
@@ -42,10 +42,11 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) {
};
}
-const KNOWN_ARCH: [&str; 19] = [
+const KNOWN_ARCH: [&str; 20] = [
"aarch64",
"arm",
"avr",
+ "csky",
"hexagon",
"mips",
"mips64",
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs
index c717a9cb5..e411c1c86 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs
@@ -30,6 +30,8 @@ const SUPPORTED_CALLING_CONVENTIONS: &[&str] = &[
"efiapi",
"avr-interrupt",
"avr-non-blocking-interrupt",
+ "riscv-interrupt-m",
+ "riscv-interrupt-s",
"C-cmse-nonsecure-call",
"wasm",
"system",
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
index d3e75c6da..1e0989405 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
@@ -42,7 +42,7 @@ pub(crate) fn complete_mod(
}
let module_definition_file =
- current_module.definition_source(ctx.db).file_id.original_file(ctx.db);
+ current_module.definition_source_file_id(ctx.db).original_file(ctx.db);
let source_root = ctx.db.source_root(ctx.db.file_source_root(module_definition_file));
let directory_to_look_for_submodules = directory_to_look_for_submodules(
current_module,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
index 7b145f3c1..3cb65b272 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
@@ -463,7 +463,9 @@ impl CompletionContext<'_> {
/// Checks whether this item should be listed in regards to stability. Returns `true` if we should.
pub(crate) fn check_stability(&self, attrs: Option<&hir::Attrs>) -> bool {
- let Some(attrs) = attrs else { return true; };
+ let Some(attrs) = attrs else {
+ return true;
+ };
!attrs.is_unstable() || self.is_nightly
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
index cc5221cfc..3ea506590 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -243,10 +243,7 @@ fn analyze(
let Some(name_like) = find_node_at_offset(&speculative_file, offset) else {
let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
- CompletionAnalysis::String {
- original,
- expanded: ast::String::cast(self_token.clone()),
- }
+ CompletionAnalysis::String { original, expanded: ast::String::cast(self_token.clone()) }
} else {
// Fix up trailing whitespace problem
// #[attr(foo = $0
@@ -736,7 +733,7 @@ fn classify_name_ref(
return None;
}
let parent = match ast::Fn::cast(parent.parent()?) {
- Some(x) => x.param_list(),
+ Some(it) => it.param_list(),
None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
};
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
index e850f7bfd..0309952c2 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
@@ -427,9 +427,26 @@ impl Builder {
let insert_text = self.insert_text.unwrap_or_else(|| label.to_string());
if !self.doc_aliases.is_empty() {
- let doc_aliases = self.doc_aliases.into_iter().join(", ");
+ let doc_aliases = self.doc_aliases.iter().join(", ");
label = SmolStr::from(format!("{label} (alias {doc_aliases})"));
- lookup = SmolStr::from(format!("{lookup} {doc_aliases}"));
+ let lookup_doc_aliases = self
+ .doc_aliases
+ .iter()
+ // Don't include aliases in `lookup` that aren't valid identifiers as including
+ // them results in weird completion filtering behavior e.g. `Partial>` matching
+ // `PartialOrd` because it has an alias of ">".
+ .filter(|alias| {
+ let mut chars = alias.chars();
+ chars.next().is_some_and(char::is_alphabetic)
+ && chars.all(|c| c.is_alphanumeric() || c == '_')
+ })
+ // Deliberately concatenated without separators as adding separators e.g.
+ // `alias1, alias2` results in LSP clients continuing to display the completion even
+ // after typing a comma or space.
+ .join("");
+ if !lookup_doc_aliases.is_empty() {
+ lookup = SmolStr::from(format!("{lookup}{lookup_doc_aliases}"));
+ }
}
if let [import_edit] = &*self.imports_to_add {
// snippets can have multiple imports, but normal completions only have up to one
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
index 106d4e1e5..2eaa42040 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
@@ -231,7 +231,7 @@ pub fn resolve_completion_edits(
&sema,
current_crate,
NameToImport::exact_case_sensitive(imported_name),
- items_locator::AssocItemSearch::Include,
+ items_locator::AssocSearchMode::Include,
Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
);
let import = items_with_name
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs
index c97144b61..1aaf39587 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs
@@ -300,6 +300,7 @@ struct Foo;
at deprecated
at derive macro derive
at derive(…)
+ at derive_const macro derive_const
at doc = "…"
at doc(alias = "…")
at doc(hidden)
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
index 382472083..e80a28904 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
@@ -1280,3 +1280,26 @@ fn here_we_go() {
"#]],
);
}
+
+#[test]
+fn completion_filtering_excludes_non_identifier_doc_aliases() {
+ check_edit(
+ "PartialOrdcmporder",
+ r#"
+#[doc(alias = ">")]
+#[doc(alias = "cmp")]
+#[doc(alias = "order")]
+trait PartialOrd {}
+
+struct Foo<T: Partial$0
+"#,
+ r#"
+#[doc(alias = ">")]
+#[doc(alias = "cmp")]
+#[doc(alias = "order")]
+trait PartialOrd {}
+
+struct Foo<T: PartialOrd
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
index 4e75dc4db..faec74206 100644
--- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
@@ -21,7 +21,7 @@ once_cell = "1.17.0"
either = "1.7.0"
itertools = "0.10.5"
arrayvec = "0.7.2"
-indexmap = "1.9.1"
+indexmap = "2.0.0"
memchr = "2.5.0"
triomphe.workspace = true
nohash-hasher.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
index 0dd544d0a..a0b05c87a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
@@ -99,8 +99,8 @@ impl RootDatabase {
hir::db::AstIdMapQuery
hir::db::ParseMacroExpansionQuery
hir::db::InternMacroCallQuery
- hir::db::MacroArgTextQuery
- hir::db::MacroDefQuery
+ hir::db::MacroArgNodeQuery
+ hir::db::DeclMacroExpanderQuery
hir::db::MacroExpandQuery
hir::db::ExpandProcMacroQuery
hir::db::HygieneFrameQuery
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
index 760834bfa..5e4562d9c 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
@@ -7,10 +7,10 @@
use arrayvec::ArrayVec;
use hir::{
- Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper, Field,
- Function, GenericParam, HasVisibility, Impl, Label, Local, Macro, Module, ModuleDef, Name,
- PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias, TypeAlias, Variant,
- Visibility,
+ Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper,
+ ExternCrateDecl, Field, Function, GenericParam, HasVisibility, Impl, Label, Local, Macro,
+ Module, ModuleDef, Name, PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias,
+ TypeAlias, Variant, Visibility,
};
use stdx::impl_from;
use syntax::{
@@ -42,6 +42,7 @@ pub enum Definition {
DeriveHelper(DeriveHelper),
BuiltinAttr(BuiltinAttr),
ToolModule(ToolModule),
+ ExternCrateDecl(ExternCrateDecl),
}
impl Definition {
@@ -73,6 +74,7 @@ impl Definition {
Definition::Local(it) => it.module(db),
Definition::GenericParam(it) => it.module(db),
Definition::Label(it) => it.module(db),
+ Definition::ExternCrateDecl(it) => it.module(db),
Definition::DeriveHelper(it) => it.derive().module(db),
Definition::BuiltinAttr(_) | Definition::BuiltinType(_) | Definition::ToolModule(_) => {
return None
@@ -93,6 +95,7 @@ impl Definition {
Definition::TraitAlias(it) => it.visibility(db),
Definition::TypeAlias(it) => it.visibility(db),
Definition::Variant(it) => it.visibility(db),
+ Definition::ExternCrateDecl(it) => it.visibility(db),
Definition::BuiltinType(_) => Visibility::Public,
Definition::Macro(_) => return None,
Definition::BuiltinAttr(_)
@@ -127,6 +130,7 @@ impl Definition {
Definition::BuiltinAttr(_) => return None, // FIXME
Definition::ToolModule(_) => return None, // FIXME
Definition::DeriveHelper(it) => it.name(db),
+ Definition::ExternCrateDecl(it) => return it.alias_or_name(db),
};
Some(name)
}
@@ -196,6 +200,10 @@ impl IdentClass {
res.push(Definition::Local(local_ref));
res.push(Definition::Field(field_ref));
}
+ IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand { decl, krate }) => {
+ res.push(Definition::ExternCrateDecl(decl));
+ res.push(Definition::Module(krate.root_module()));
+ }
IdentClass::Operator(
OperatorClass::Await(func)
| OperatorClass::Prefix(func)
@@ -222,6 +230,10 @@ impl IdentClass {
res.push(Definition::Local(local_ref));
res.push(Definition::Field(field_ref));
}
+ IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand { decl, krate }) => {
+ res.push(Definition::ExternCrateDecl(decl));
+ res.push(Definition::Module(krate.root_module()));
+ }
IdentClass::Operator(_) => (),
}
res
@@ -310,6 +322,7 @@ impl NameClass {
ast::Item::Enum(it) => Definition::Adt(hir::Adt::Enum(sema.to_def(&it)?)),
ast::Item::Struct(it) => Definition::Adt(hir::Adt::Struct(sema.to_def(&it)?)),
ast::Item::Union(it) => Definition::Adt(hir::Adt::Union(sema.to_def(&it)?)),
+ ast::Item::ExternCrate(it) => Definition::ExternCrateDecl(sema.to_def(&it)?),
_ => return None,
};
Some(definition)
@@ -346,10 +359,8 @@ impl NameClass {
let path = use_tree.path()?;
sema.resolve_path(&path).map(Definition::from)
} else {
- let extern_crate = rename.syntax().parent().and_then(ast::ExternCrate::cast)?;
- let krate = sema.resolve_extern_crate(&extern_crate)?;
- let root_module = krate.root_module(sema.db);
- Some(Definition::Module(root_module))
+ sema.to_def(&rename.syntax().parent().and_then(ast::ExternCrate::cast)?)
+ .map(Definition::ExternCrateDecl)
}
}
}
@@ -427,7 +438,19 @@ impl OperatorClass {
#[derive(Debug)]
pub enum NameRefClass {
Definition(Definition),
- FieldShorthand { local_ref: Local, field_ref: Field },
+ FieldShorthand {
+ local_ref: Local,
+ field_ref: Field,
+ },
+    /// The specific situation where we have an extern crate declaration without a rename.
+    /// Here we have both a declaration and a reference.
+ /// ```rs
+ /// extern crate foo;
+ /// ```
+ ExternCrateShorthand {
+ decl: ExternCrateDecl,
+ krate: Crate,
+ },
}
impl NameRefClass {
@@ -513,10 +536,14 @@ impl NameRefClass {
}
None
},
- ast::ExternCrate(extern_crate) => {
- let krate = sema.resolve_extern_crate(&extern_crate)?;
- let root_module = krate.root_module(sema.db);
- Some(NameRefClass::Definition(Definition::Module(root_module)))
+ ast::ExternCrate(extern_crate_ast) => {
+ let extern_crate = sema.to_def(&extern_crate_ast)?;
+ let krate = extern_crate.resolved_crate(sema.db)?;
+ Some(if extern_crate_ast.rename().is_some() {
+ NameRefClass::Definition(Definition::Module(krate.root_module()))
+ } else {
+ NameRefClass::ExternCrateShorthand { krate, decl: extern_crate }
+ })
},
_ => None
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
index c8341fed1..b63dde2c2 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
@@ -167,7 +167,7 @@ impl FamousDefs<'_, '_> {
lang_crate => lang_crate,
};
let std_crate = self.find_lang_crate(lang_crate)?;
- let mut module = std_crate.root_module(db);
+ let mut module = std_crate.root_module();
for segment in path {
module = module.children(db).find_map(|child| {
let name = child.name(db)?;
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
index e488300b4..49b37024a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
@@ -931,22 +931,6 @@ $ cat $(find -name '*.s')
"##,
},
Lint {
- label: "abi_thiscall",
- description: r##"# `abi_thiscall`
-
-The tracking issue for this feature is: [#42202]
-
-[#42202]: https://github.com/rust-lang/rust/issues/42202
-
-------------------------
-
-The MSVC ABI on x86 Windows uses the `thiscall` calling convention for C++
-instance methods by default; it is identical to the usual (C) calling
-convention on x86 Windows except that the first parameter of the method,
-the `this` pointer, is passed in the ECX register.
-"##,
- },
- Lint {
label: "allocator_api",
description: r##"# `allocator_api`
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
index eba9d8afc..1eb8f0002 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
@@ -9,7 +9,10 @@ use syntax::{
AstToken, SyntaxKind, SyntaxToken, TokenAtOffset,
};
-use crate::{defs::Definition, generated, RootDatabase};
+use crate::{
+ defs::{Definition, IdentClass},
+ generated, RootDatabase,
+};
pub fn item_name(db: &RootDatabase, item: ItemInNs) -> Option<Name> {
match item {
@@ -109,3 +112,16 @@ pub fn is_editable_crate(krate: Crate, db: &RootDatabase) -> bool {
let source_root_id = db.file_source_root(root_file);
!db.source_root(source_root_id).is_library
}
+
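+/// Classifies the token (descending into macros) and returns its [`Definition`],
+/// or `None` if no single unambiguous definition is found.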
+pub fn get_definition(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> Option<Definition> {
+ for token in sema.descend_into_macros(token) {
+ let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
+ if let Some(&[x]) = def.as_deref() {
+ return Some(x);
+ }
+ }
+ None
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
index 901d592c6..e52dc3567 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
@@ -13,7 +13,7 @@ use syntax::{
use crate::{
helpers::item_name,
- items_locator::{self, AssocItemSearch, DEFAULT_QUERY_SEARCH_LIMIT},
+ items_locator::{self, AssocSearchMode, DEFAULT_QUERY_SEARCH_LIMIT},
RootDatabase,
};
@@ -317,7 +317,7 @@ fn path_applicable_imports(
// * improve the associated completion item matching and/or scoring to ensure no noisy completions appear
//
// see also an ignored test under FIXME comment in the qualify_path.rs module
- AssocItemSearch::Exclude,
+ AssocSearchMode::Exclude,
Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
)
.filter_map(|item| {
@@ -334,7 +334,7 @@ fn path_applicable_imports(
sema,
current_crate,
path_candidate.name.clone(),
- AssocItemSearch::Include,
+ AssocSearchMode::Include,
Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
)
.filter_map(|item| {
@@ -483,7 +483,7 @@ fn trait_applicable_items(
sema,
current_crate,
trait_candidate.assoc_item_name.clone(),
- AssocItemSearch::AssocItemsOnly,
+ AssocSearchMode::AssocItemsOnly,
Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
)
.filter_map(|input| item_as_assoc(db, input))
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
index 46f1353e2..3f7a3ec2d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
@@ -3,10 +3,7 @@
//! The main reason for this module to exist is the fact that project's items and dependencies' items
//! are located in different caches, with different APIs.
use either::Either;
-use hir::{
- import_map::{self, ImportKind},
- AsAssocItem, Crate, ItemInNs, Semantics,
-};
+use hir::{import_map, AsAssocItem, Crate, ItemInNs, Semantics};
use limit::Limit;
use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase};
@@ -14,23 +11,14 @@ use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase};
/// A value to use, when uncertain which limit to pick.
pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(40);
-/// Three possible ways to search for the name in associated and/or other items.
-#[derive(Debug, Clone, Copy)]
-pub enum AssocItemSearch {
- /// Search for the name in both associated and other items.
- Include,
- /// Search for the name in other items only.
- Exclude,
- /// Search for the name in the associated items only.
- AssocItemsOnly,
-}
+pub use import_map::AssocSearchMode;
/// Searches for importable items with the given name in the crate and its dependencies.
pub fn items_with_name<'a>(
sema: &'a Semantics<'_, RootDatabase>,
krate: Crate,
name: NameToImport,
- assoc_item_search: AssocItemSearch,
+ assoc_item_search: AssocSearchMode,
limit: Option<usize>,
) -> impl Iterator<Item = ItemInNs> + 'a {
let _p = profile::span("items_with_name").detail(|| {
@@ -48,9 +36,7 @@ pub fn items_with_name<'a>(
let mut local_query = symbol_index::Query::new(exact_name.clone());
local_query.exact();
- let external_query = import_map::Query::new(exact_name)
- .name_only()
- .search_mode(import_map::SearchMode::Equals);
+ let external_query = import_map::Query::new(exact_name);
(
local_query,
@@ -61,17 +47,8 @@ pub fn items_with_name<'a>(
let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone());
let mut external_query = import_map::Query::new(fuzzy_search_string.clone())
- .search_mode(import_map::SearchMode::Fuzzy)
- .name_only();
- match assoc_item_search {
- AssocItemSearch::Include => {}
- AssocItemSearch::Exclude => {
- external_query = external_query.exclude_import_kind(ImportKind::AssociatedItem);
- }
- AssocItemSearch::AssocItemsOnly => {
- external_query = external_query.assoc_items_only();
- }
- }
+ .fuzzy()
+ .assoc_search_mode(assoc_item_search);
if fuzzy_search_string.to_lowercase() != fuzzy_search_string {
local_query.case_sensitive();
@@ -93,13 +70,15 @@ pub fn items_with_name<'a>(
fn find_items<'a>(
sema: &'a Semantics<'_, RootDatabase>,
krate: Crate,
- assoc_item_search: AssocItemSearch,
+ assoc_item_search: AssocSearchMode,
local_query: symbol_index::Query,
external_query: import_map::Query,
) -> impl Iterator<Item = ItemInNs> + 'a {
let _p = profile::span("find_items");
let db = sema.db;
+ // NOTE: `external_query` includes `assoc_item_search`, so we don't need to
+ // filter on our own.
let external_importables =
krate.query_external_importables(db, external_query).map(|external_importable| {
match external_importable {
@@ -112,18 +91,15 @@ fn find_items<'a>(
let local_results = local_query
.search(&symbol_index::crate_symbols(db, krate))
.into_iter()
- .filter_map(|local_candidate| match local_candidate.def {
- hir::ModuleDef::Macro(macro_def) => Some(ItemInNs::Macros(macro_def)),
- def => Some(ItemInNs::from(def)),
+ .filter(move |candidate| match assoc_item_search {
+ AssocSearchMode::Include => true,
+ AssocSearchMode::Exclude => candidate.def.as_assoc_item(db).is_none(),
+ AssocSearchMode::AssocItemsOnly => candidate.def.as_assoc_item(db).is_some(),
+ })
+ .map(|local_candidate| match local_candidate.def {
+ hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
+ def => ItemInNs::from(def),
});
- external_importables.chain(local_results).filter(move |&item| match assoc_item_search {
- AssocItemSearch::Include => true,
- AssocItemSearch::Exclude => !is_assoc_item(item, sema.db),
- AssocItemSearch::AssocItemsOnly => is_assoc_item(item, sema.db),
- })
-}
-
-fn is_assoc_item(item: ItemInNs, db: &RootDatabase) -> bool {
- item.as_module_def().and_then(|module_def| module_def.as_assoc_item(db)).is_some()
+ external_importables.chain(local_results)
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
index ff1a20f03..f27ed485d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
@@ -200,8 +200,8 @@ impl RootDatabase {
hir_db::AstIdMapQuery
// hir_db::ParseMacroExpansionQuery
// hir_db::InternMacroCallQuery
- hir_db::MacroArgTextQuery
- hir_db::MacroDefQuery
+ hir_db::MacroArgNodeQuery
+ hir_db::DeclMacroExpanderQuery
// hir_db::MacroExpandQuery
hir_db::ExpandProcMacroQuery
hir_db::HygieneFrameQuery
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
index 73e6a920e..1d0cb426a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
@@ -195,7 +195,7 @@ fn postorder(item: &SyntaxNode) -> impl Iterator<Item = SyntaxNode> {
})
}
-impl<'a> Ctx<'a> {
+impl Ctx<'_> {
fn apply(&self, item: &SyntaxNode) {
// `transform_path` may update a node's parent and that would break the
// tree traversal. Thus all paths in the tree are collected into a vec
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
index 52a23b4b8..aa0bb7cce 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
@@ -82,8 +82,9 @@ impl Definition {
}
/// Textual range of the identifier which will change when renaming this
- /// `Definition`. Note that some definitions, like builtin types, can't be
- /// renamed.
+    /// `Definition`. Note that builtin types can't be renamed, and that extern crate
+    /// names will report their range, though a rename will introduce an alias instead.
pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
let res = match self {
Definition::Macro(mac) => {
@@ -146,6 +147,16 @@ impl Definition {
let lifetime = src.value.lifetime()?;
src.with_value(lifetime.syntax()).original_file_range_opt(sema.db)
}
+ Definition::ExternCrateDecl(it) => {
+ let src = it.source(sema.db)?;
+ if let Some(rename) = src.value.rename() {
+ let name = rename.name()?;
+ src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ } else {
+ let name = src.value.name_ref()?;
+ src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ }
+ }
Definition::BuiltinType(_) => return None,
Definition::SelfType(_) => return None,
Definition::BuiltinAttr(_) => return None,
@@ -526,6 +537,9 @@ fn source_edit_from_def(
TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(),
),
+ Definition::ExternCrateDecl(decl) if decl.alias(sema.db).is_none() => {
+ (TextRange::empty(range.end()), format!(" as {new_name}"))
+ }
_ => (range, new_name.to_owned()),
};
edit.replace(range, new_name);
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index e8ff107bd..d5abd0991 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -127,7 +127,7 @@ impl SearchScope {
}
/// Build a search scope spanning the given module and all its submodules.
- fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
+ pub fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
let mut entries = IntMap::default();
let (file_id, range) = {
@@ -149,10 +149,8 @@ impl SearchScope {
let mut to_visit: Vec<_> = module.children(db).collect();
while let Some(module) = to_visit.pop() {
- if let InFile { file_id, value: ModuleSource::SourceFile(_) } =
- module.definition_source(db)
- {
- entries.insert(file_id.original_file(db), None);
+ if let Some(file_id) = module.as_source_file_id(db) {
+ entries.insert(file_id, None);
}
to_visit.extend(module.children(db));
}
@@ -331,7 +329,7 @@ impl Definition {
pub struct FindUsages<'a> {
def: Definition,
sema: &'a Semantics<'a, RootDatabase>,
- scope: Option<SearchScope>,
+ scope: Option<&'a SearchScope>,
/// The container of our definition should it be an assoc item
assoc_item_container: Option<hir::AssocItemContainer>,
/// whether to search for the `Self` type of the definition
@@ -342,19 +340,19 @@ pub struct FindUsages<'a> {
impl<'a> FindUsages<'a> {
/// Enable searching for `Self` when the definition is a type or `self` for modules.
- pub fn include_self_refs(mut self) -> FindUsages<'a> {
+ pub fn include_self_refs(mut self) -> Self {
self.include_self_kw_refs = def_to_ty(self.sema, &self.def);
self.search_self_mod = true;
self
}
/// Limit the search to a given [`SearchScope`].
- pub fn in_scope(self, scope: SearchScope) -> FindUsages<'a> {
+ pub fn in_scope(self, scope: &'a SearchScope) -> Self {
self.set_scope(Some(scope))
}
/// Limit the search to a given [`SearchScope`].
- pub fn set_scope(mut self, scope: Option<SearchScope>) -> FindUsages<'a> {
+ pub fn set_scope(mut self, scope: Option<&'a SearchScope>) -> Self {
assert!(self.scope.is_none());
self.scope = scope;
self
@@ -378,7 +376,7 @@ impl<'a> FindUsages<'a> {
res
}
- fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) {
+ pub fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) {
let _p = profile::span("FindUsages:search");
let sema = self.sema;
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
index 061fb0f05..39763479c 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
@@ -7,14 +7,17 @@ use std::{collections::hash_map::Entry, iter, mem};
use crate::SnippetCap;
use base_db::{AnchoredPathBuf, FileId};
+use itertools::Itertools;
use nohash_hasher::IntMap;
use stdx::never;
-use syntax::{algo, ast, ted, AstNode, SyntaxNode, SyntaxNodePtr, TextRange, TextSize};
+use syntax::{
+ algo, AstNode, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize,
+};
use text_edit::{TextEdit, TextEditBuilder};
#[derive(Default, Debug, Clone)]
pub struct SourceChange {
- pub source_file_edits: IntMap<FileId, TextEdit>,
+ pub source_file_edits: IntMap<FileId, (TextEdit, Option<SnippetEdit>)>,
pub file_system_edits: Vec<FileSystemEdit>,
pub is_snippet: bool,
}
@@ -23,7 +26,7 @@ impl SourceChange {
/// Creates a new SourceChange with the given label
/// from the edits.
pub fn from_edits(
- source_file_edits: IntMap<FileId, TextEdit>,
+ source_file_edits: IntMap<FileId, (TextEdit, Option<SnippetEdit>)>,
file_system_edits: Vec<FileSystemEdit>,
) -> Self {
SourceChange { source_file_edits, file_system_edits, is_snippet: false }
@@ -31,7 +34,7 @@ impl SourceChange {
pub fn from_text_edit(file_id: FileId, edit: TextEdit) -> Self {
SourceChange {
- source_file_edits: iter::once((file_id, edit)).collect(),
+ source_file_edits: iter::once((file_id, (edit, None))).collect(),
..Default::default()
}
}
@@ -39,12 +42,31 @@ impl SourceChange {
/// Inserts a [`TextEdit`] for the given [`FileId`]. This properly handles merging existing
/// edits for a file if some already exist.
pub fn insert_source_edit(&mut self, file_id: FileId, edit: TextEdit) {
+ self.insert_source_and_snippet_edit(file_id, edit, None)
+ }
+
+ /// Inserts a [`TextEdit`] and potentially a [`SnippetEdit`] for the given [`FileId`].
+ /// This properly handles merging existing edits for a file if some already exist.
+ pub fn insert_source_and_snippet_edit(
+ &mut self,
+ file_id: FileId,
+ edit: TextEdit,
+ snippet_edit: Option<SnippetEdit>,
+ ) {
match self.source_file_edits.entry(file_id) {
Entry::Occupied(mut entry) => {
- never!(entry.get_mut().union(edit).is_err(), "overlapping edits for same file");
+ let value = entry.get_mut();
+ never!(value.0.union(edit).is_err(), "overlapping edits for same file");
+ never!(
+ value.1.is_some() && snippet_edit.is_some(),
+ "overlapping snippet edits for same file"
+ );
+ if value.1.is_none() {
+ value.1 = snippet_edit;
+ }
}
Entry::Vacant(entry) => {
- entry.insert(edit);
+ entry.insert((edit, snippet_edit));
}
}
}
@@ -53,7 +75,10 @@ impl SourceChange {
self.file_system_edits.push(edit);
}
- pub fn get_source_edit(&self, file_id: FileId) -> Option<&TextEdit> {
+ pub fn get_source_and_snippet_edit(
+ &self,
+ file_id: FileId,
+ ) -> Option<&(TextEdit, Option<SnippetEdit>)> {
self.source_file_edits.get(&file_id)
}
@@ -67,7 +92,18 @@ impl SourceChange {
impl Extend<(FileId, TextEdit)> for SourceChange {
fn extend<T: IntoIterator<Item = (FileId, TextEdit)>>(&mut self, iter: T) {
- iter.into_iter().for_each(|(file_id, edit)| self.insert_source_edit(file_id, edit));
+ self.extend(iter.into_iter().map(|(file_id, edit)| (file_id, (edit, None))))
+ }
+}
+
+impl Extend<(FileId, (TextEdit, Option<SnippetEdit>))> for SourceChange {
+ fn extend<T: IntoIterator<Item = (FileId, (TextEdit, Option<SnippetEdit>))>>(
+ &mut self,
+ iter: T,
+ ) {
+ iter.into_iter().for_each(|(file_id, (edit, snippet_edit))| {
+ self.insert_source_and_snippet_edit(file_id, edit, snippet_edit)
+ });
}
}
@@ -79,6 +115,8 @@ impl Extend<FileSystemEdit> for SourceChange {
impl From<IntMap<FileId, TextEdit>> for SourceChange {
fn from(source_file_edits: IntMap<FileId, TextEdit>) -> SourceChange {
+ let source_file_edits =
+ source_file_edits.into_iter().map(|(file_id, edit)| (file_id, (edit, None))).collect();
SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false }
}
}
@@ -91,6 +129,65 @@ impl FromIterator<(FileId, TextEdit)> for SourceChange {
}
}
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct SnippetEdit(Vec<(u32, TextRange)>);
+
+impl SnippetEdit {
+ pub fn new(snippets: Vec<Snippet>) -> Self {
+ let mut snippet_ranges = snippets
+ .into_iter()
+ .zip(1..)
+ .with_position()
+ .map(|pos| {
+ let (snippet, index) = match pos {
+ itertools::Position::First(it) | itertools::Position::Middle(it) => it,
+ // last/only snippet gets index 0
+ itertools::Position::Last((snippet, _))
+ | itertools::Position::Only((snippet, _)) => (snippet, 0),
+ };
+
+ let range = match snippet {
+ Snippet::Tabstop(pos) => TextRange::empty(pos),
+ Snippet::Placeholder(range) => range,
+ };
+ (index, range)
+ })
+ .collect_vec();
+
+ snippet_ranges.sort_by_key(|(_, range)| range.start());
+
+ // Ensure that none of the ranges overlap
+ let disjoint_ranges = snippet_ranges
+ .iter()
+ .zip(snippet_ranges.iter().skip(1))
+ .all(|((_, left), (_, right))| left.end() <= right.start() || left == right);
+ stdx::always!(disjoint_ranges);
+
+ SnippetEdit(snippet_ranges)
+ }
+
+ /// Inserts all of the snippets into the given text.
+ pub fn apply(&self, text: &mut String) {
+ // Start from the back so that we don't have to adjust ranges
+ for (index, range) in self.0.iter().rev() {
+ if range.is_empty() {
+ // is a tabstop
+ text.insert_str(range.start().into(), &format!("${index}"));
+ } else {
+ // is a placeholder
+ text.insert(range.end().into(), '}');
+ text.insert_str(range.start().into(), &format!("${{{index}:"));
+ }
+ }
+ }
+
+ /// Gets the underlying snippet index + text range
+ /// Tabstops are represented by an empty range, and placeholders use the range that they were given
+ pub fn into_edit_ranges(self) -> Vec<(u32, TextRange)> {
+ self.0
+ }
+}
+
pub struct SourceChangeBuilder {
pub edit: TextEditBuilder,
pub file_id: FileId,
@@ -149,24 +246,19 @@ impl SourceChangeBuilder {
}
fn commit(&mut self) {
- // Render snippets first so that they get bundled into the tree diff
- if let Some(mut snippets) = self.snippet_builder.take() {
- // Last snippet always has stop index 0
- let last_stop = snippets.places.pop().unwrap();
- last_stop.place(0);
-
- for (index, stop) in snippets.places.into_iter().enumerate() {
- stop.place(index + 1)
- }
- }
+ let snippet_edit = self.snippet_builder.take().map(|builder| {
+ SnippetEdit::new(
+ builder.places.into_iter().map(PlaceSnippet::finalize_position).collect_vec(),
+ )
+ });
if let Some(tm) = self.mutated_tree.take() {
- algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit)
+ algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit);
}
let edit = mem::take(&mut self.edit).finish();
- if !edit.is_empty() {
- self.source_change.insert_source_edit(self.file_id, edit);
+ if !edit.is_empty() || snippet_edit.is_some() {
+ self.source_change.insert_source_and_snippet_edit(self.file_id, edit, snippet_edit);
}
}
@@ -237,19 +329,31 @@ impl SourceChangeBuilder {
/// Adds a tabstop snippet to place the cursor before `node`
pub fn add_tabstop_before(&mut self, _cap: SnippetCap, node: impl AstNode) {
assert!(node.syntax().parent().is_some());
- self.add_snippet(PlaceSnippet::Before(node.syntax().clone()));
+ self.add_snippet(PlaceSnippet::Before(node.syntax().clone().into()));
}
/// Adds a tabstop snippet to place the cursor after `node`
pub fn add_tabstop_after(&mut self, _cap: SnippetCap, node: impl AstNode) {
assert!(node.syntax().parent().is_some());
- self.add_snippet(PlaceSnippet::After(node.syntax().clone()));
+ self.add_snippet(PlaceSnippet::After(node.syntax().clone().into()));
+ }
+
+ /// Adds a tabstop snippet to place the cursor before `token`
+ pub fn add_tabstop_before_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
+ assert!(token.parent().is_some());
+ self.add_snippet(PlaceSnippet::Before(token.clone().into()));
+ }
+
+ /// Adds a tabstop snippet to place the cursor after `token`
+ pub fn add_tabstop_after_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
+ assert!(token.parent().is_some());
+ self.add_snippet(PlaceSnippet::After(token.clone().into()));
}
/// Adds a snippet to move the cursor selected over `node`
pub fn add_placeholder_snippet(&mut self, _cap: SnippetCap, node: impl AstNode) {
assert!(node.syntax().parent().is_some());
- self.add_snippet(PlaceSnippet::Over(node.syntax().clone()))
+ self.add_snippet(PlaceSnippet::Over(node.syntax().clone().into()))
}
fn add_snippet(&mut self, snippet: PlaceSnippet) {
@@ -260,6 +364,16 @@ impl SourceChangeBuilder {
pub fn finish(mut self) -> SourceChange {
self.commit();
+
+ // Only one file can have snippet edits
+ stdx::never!(self
+ .source_change
+ .source_file_edits
+ .iter()
+ .filter(|(_, (_, snippet_edit))| snippet_edit.is_some())
+ .at_most_one()
+ .is_err());
+
mem::take(&mut self.source_change)
}
}
@@ -281,65 +395,28 @@ impl From<FileSystemEdit> for SourceChange {
}
}
+pub enum Snippet {
+ /// A tabstop snippet (e.g. `$0`).
+ Tabstop(TextSize),
+ /// A placeholder snippet (e.g. `${0:placeholder}`).
+ Placeholder(TextRange),
+}
+
enum PlaceSnippet {
- /// Place a tabstop before a node
- Before(SyntaxNode),
- /// Place a tabstop before a node
- After(SyntaxNode),
- /// Place a placeholder snippet in place of the node
- Over(SyntaxNode),
+ /// Place a tabstop before an element
+ Before(SyntaxElement),
+ /// Place a tabstop after an element
+ After(SyntaxElement),
+ /// Place a placeholder snippet in place of the element
+ Over(SyntaxElement),
}
impl PlaceSnippet {
- /// Places the snippet before or over a node with the given tab stop index
- fn place(self, order: usize) {
- // ensure the target node is still attached
- match &self {
- PlaceSnippet::Before(node) | PlaceSnippet::After(node) | PlaceSnippet::Over(node) => {
- // node should still be in the tree, but if it isn't
- // then it's okay to just ignore this place
- if stdx::never!(node.parent().is_none()) {
- return;
- }
- }
- }
-
+ fn finalize_position(self) -> Snippet {
match self {
- PlaceSnippet::Before(node) => {
- ted::insert_raw(ted::Position::before(&node), Self::make_tab_stop(order));
- }
- PlaceSnippet::After(node) => {
- ted::insert_raw(ted::Position::after(&node), Self::make_tab_stop(order));
- }
- PlaceSnippet::Over(node) => {
- let position = ted::Position::before(&node);
- node.detach();
-
- let snippet = ast::SourceFile::parse(&format!("${{{order}:_}}"))
- .syntax_node()
- .clone_for_update();
-
- let placeholder =
- snippet.descendants().find_map(ast::UnderscoreExpr::cast).unwrap();
- ted::replace(placeholder.syntax(), node);
-
- ted::insert_raw(position, snippet);
- }
+ PlaceSnippet::Before(it) => Snippet::Tabstop(it.text_range().start()),
+ PlaceSnippet::After(it) => Snippet::Tabstop(it.text_range().end()),
+ PlaceSnippet::Over(it) => Snippet::Placeholder(it.text_range()),
}
}
-
- fn make_tab_stop(order: usize) -> SyntaxNode {
- let stop = ast::SourceFile::parse(&format!("stop!(${order})"))
- .syntax_node()
- .descendants()
- .find_map(ast::TokenTree::cast)
- .unwrap()
- .syntax()
- .clone_for_update();
-
- stop.first_token().unwrap().detach();
- stop.last_token().unwrap().detach();
-
- stop
- }
}
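
The `SnippetEdit` introduced above replaces the old ted-based tab-stop insertion: indices are assigned up front (the last/only snippet becomes `$0`, earlier ones `$1`, `$2`, …), the ranges are checked to be disjoint, and `apply` splices the markers into the rendered text from back to front so earlier offsets stay valid. A self-contained sketch of that rendering step, using plain byte offsets in place of `TextRange` (illustrative only, not the ide-db API):

    #[derive(Clone, Copy)]
    enum Snippet {
        Tabstop(usize),            // insert `$n` at this offset
        Placeholder(usize, usize), // wrap the byte range in `${n:...}`
    }

    fn apply_snippets(text: &mut String, snippets: &[Snippet]) {
        // Index assignment: the last/only snippet becomes $0, the rest count up from 1.
        let mut indexed: Vec<(u32, usize, usize)> = snippets
            .iter()
            .enumerate()
            .map(|(i, s)| {
                let idx = if i + 1 == snippets.len() { 0 } else { (i + 1) as u32 };
                match *s {
                    Snippet::Tabstop(pos) => (idx, pos, pos),
                    Snippet::Placeholder(start, end) => (idx, start, end),
                }
            })
            .collect();
        indexed.sort_by_key(|&(_, start, _)| start);

        // Insert from the back so earlier offsets are not shifted by later insertions.
        for &(idx, start, end) in indexed.iter().rev() {
            if start == end {
                text.insert_str(start, &format!("${idx}"));
            } else {
                text.insert(end, '}');
                text.insert_str(start, &format!("${{{idx}:"));
            }
        }
    }

    fn main() {
        let mut text = String::from("fn foo() { todo!() }");
        // Placeholder over `todo!()` (bytes 11..18) and a tabstop at the end of the text.
        apply_snippets(&mut text, &[Snippet::Placeholder(11, 18), Snippet::Tabstop(20)]);
        assert_eq!(text, "fn foo() { ${1:todo!()} }$0");
        println!("{text}");
    }

Walking the sorted ranges in reverse is what lets the insertions avoid any offset bookkeeping, exactly as in `SnippetEdit::apply`.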
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
index e18624fcc..14aa39401 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
@@ -16,6 +16,7 @@ cov-mark = "2.0.0-pre.1"
either = "1.7.0"
itertools = "0.10.5"
serde_json = "1.0.86"
+once_cell = "1.17.0"
# local deps
profile.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
index 30576c71f..491005403 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: break-outside-of-loop
//
@@ -13,10 +13,11 @@ pub(crate) fn break_outside_of_loop(
let construct = if d.is_break { "break" } else { "continue" };
format!("{construct} outside of loop")
};
- Diagnostic::new(
- "break-outside-of-loop",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0268"),
message,
- ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ d.expr.clone().map(|it| it.into()),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs
index d2f27664d..e1e5db91c 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs
@@ -1,6 +1,6 @@
use hir::HirDisplay;
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: expected-function
//
@@ -9,10 +9,11 @@ pub(crate) fn expected_function(
ctx: &DiagnosticsContext<'_>,
d: &hir::ExpectedFunction,
) -> Diagnostic {
- Diagnostic::new(
- "expected-function",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0618"),
format!("expected function, found {}", d.found.display(ctx.sema.db)),
- ctx.sema.diagnostics_display_range(d.call.clone().map(|it| it.into())).range,
+ d.call.clone().map(|it| it.into()),
)
.experimental()
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
index 2b7105362..3b69640af 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
@@ -5,7 +5,7 @@ use ide_db::{base_db::FileId, source_change::SourceChange};
use syntax::{ast, match_ast, AstNode, SyntaxNode};
use text_edit::TextEdit;
-use crate::{fix, Diagnostic, Severity};
+use crate::{fix, Diagnostic, DiagnosticCode};
pub(crate) fn field_shorthand(acc: &mut Vec<Diagnostic>, file_id: FileId, node: &SyntaxNode) {
match_ast! {
@@ -46,14 +46,17 @@ fn check_expr_field_shorthand(
let field_range = record_field.syntax().text_range();
acc.push(
- Diagnostic::new("use-field-shorthand", "Shorthand struct initialization", field_range)
- .severity(Severity::WeakWarning)
- .with_fixes(Some(vec![fix(
- "use_expr_field_shorthand",
- "Use struct shorthand initialization",
- SourceChange::from_text_edit(file_id, edit),
- field_range,
- )])),
+ Diagnostic::new(
+ DiagnosticCode::Clippy("redundant_field_names"),
+ "Shorthand struct initialization",
+ field_range,
+ )
+ .with_fixes(Some(vec![fix(
+ "use_expr_field_shorthand",
+ "Use struct shorthand initialization",
+ SourceChange::from_text_edit(file_id, edit),
+ field_range,
+ )])),
);
}
}
@@ -87,14 +90,17 @@ fn check_pat_field_shorthand(
let field_range = record_pat_field.syntax().text_range();
acc.push(
- Diagnostic::new("use-field-shorthand", "Shorthand struct pattern", field_range)
- .severity(Severity::WeakWarning)
- .with_fixes(Some(vec![fix(
- "use_pat_field_shorthand",
- "Use struct field shorthand",
- SourceChange::from_text_edit(file_id, edit),
- field_range,
- )])),
+ Diagnostic::new(
+ DiagnosticCode::Clippy("redundant_field_names"),
+ "Shorthand struct pattern",
+ field_range,
+ )
+ .with_fixes(Some(vec![fix(
+ "use_pat_field_shorthand",
+ "Use struct field shorthand",
+ SourceChange::from_text_edit(file_id, edit),
+ field_range,
+ )])),
);
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
index f558b7256..9eb763d3e 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -1,7 +1,7 @@
use cfg::DnfExpr;
use stdx::format_to;
-use crate::{Diagnostic, DiagnosticsContext, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
// Diagnostic: inactive-code
//
@@ -27,13 +27,12 @@ pub(crate) fn inactive_code(
format_to!(message, ": {}", inactive);
}
}
-
+ // FIXME: This shouldn't be a diagnostic
let res = Diagnostic::new(
- "inactive-code",
+ DiagnosticCode::Ra("inactive-code", Severity::WeakWarning),
message,
ctx.sema.diagnostics_display_range(d.node.clone()).range,
)
- .severity(Severity::WeakWarning)
.with_unused(true);
Some(res)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
index 72af9ebfc..4afb4db03 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
@@ -1,17 +1,17 @@
use hir::InFile;
-use crate::{Diagnostic, DiagnosticsContext, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: incoherent-impl
//
// This diagnostic is triggered if the target type of an impl is from a foreign crate.
pub(crate) fn incoherent_impl(ctx: &DiagnosticsContext<'_>, d: &hir::IncoherentImpl) -> Diagnostic {
- Diagnostic::new(
- "incoherent-impl",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0210"),
format!("cannot define inherent `impl` for foreign type"),
- ctx.sema.diagnostics_display_range(InFile::new(d.file_id, d.impl_.clone().into())).range,
+ InFile::new(d.file_id, d.impl_.clone().into()),
)
- .severity(Severity::Error)
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index 90279e145..235062bf5 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -1,4 +1,4 @@
-use hir::{db::ExpandDatabase, InFile};
+use hir::{db::ExpandDatabase, CaseType, InFile};
use ide_db::{assists::Assist, defs::NameClass};
use syntax::AstNode;
@@ -6,23 +6,29 @@ use crate::{
// references::rename::rename_with_semantics,
unresolved_fix,
Diagnostic,
+ DiagnosticCode,
DiagnosticsContext,
- Severity,
};
// Diagnostic: incorrect-ident-case
//
// This diagnostic is triggered if an item name doesn't follow https://doc.rust-lang.org/1.0.0/style/style/naming/README.html[Rust naming convention].
pub(crate) fn incorrect_case(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Diagnostic {
- Diagnostic::new(
- "incorrect-ident-case",
+ let code = match d.expected_case {
+ CaseType::LowerSnakeCase => DiagnosticCode::RustcLint("non_snake_case"),
+ CaseType::UpperSnakeCase => DiagnosticCode::RustcLint("non_upper_case_globals"),
+ // The name is lying. It also covers variants, traits, ...
+ CaseType::UpperCamelCase => DiagnosticCode::RustcLint("non_camel_case_types"),
+ };
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ code,
format!(
"{} `{}` should have {} name, e.g. `{}`",
d.ident_type, d.ident_text, d.expected_case, d.suggested_text
),
- ctx.sema.diagnostics_display_range(InFile::new(d.file, d.ident.clone().into())).range,
+ InFile::new(d.file, d.ident.clone().into()),
)
- .severity(Severity::WeakWarning)
.with_fixes(fixes(ctx, d))
}
@@ -149,7 +155,7 @@ impl TestStruct {
check_diagnostics(
r#"
fn FOO() {}
-// ^^^ 💡 weak: Function `FOO` should have snake_case name, e.g. `foo`
+// ^^^ 💡 warn: Function `FOO` should have snake_case name, e.g. `foo`
"#,
);
check_fix(r#"fn FOO$0() {}"#, r#"fn foo() {}"#);
@@ -160,7 +166,7 @@ fn FOO() {}
check_diagnostics(
r#"
fn NonSnakeCaseName() {}
-// ^^^^^^^^^^^^^^^^ 💡 weak: Function `NonSnakeCaseName` should have snake_case name, e.g. `non_snake_case_name`
+// ^^^^^^^^^^^^^^^^ 💡 warn: Function `NonSnakeCaseName` should have snake_case name, e.g. `non_snake_case_name`
"#,
);
}
@@ -170,10 +176,10 @@ fn NonSnakeCaseName() {}
check_diagnostics(
r#"
fn foo(SomeParam: u8) {}
- // ^^^^^^^^^ 💡 weak: Parameter `SomeParam` should have snake_case name, e.g. `some_param`
+ // ^^^^^^^^^ 💡 warn: Parameter `SomeParam` should have snake_case name, e.g. `some_param`
fn foo2(ok_param: &str, CAPS_PARAM: u8) {}
- // ^^^^^^^^^^ 💡 weak: Parameter `CAPS_PARAM` should have snake_case name, e.g. `caps_param`
+ // ^^^^^^^^^^ 💡 warn: Parameter `CAPS_PARAM` should have snake_case name, e.g. `caps_param`
"#,
);
}
@@ -184,9 +190,9 @@ fn foo2(ok_param: &str, CAPS_PARAM: u8) {}
r#"
fn foo() {
let SOME_VALUE = 10;
- // ^^^^^^^^^^ 💡 weak: Variable `SOME_VALUE` should have snake_case name, e.g. `some_value`
+ // ^^^^^^^^^^ 💡 warn: Variable `SOME_VALUE` should have snake_case name, e.g. `some_value`
let AnotherValue = 20;
- // ^^^^^^^^^^^^ 💡 weak: Variable `AnotherValue` should have snake_case name, e.g. `another_value`
+ // ^^^^^^^^^^^^ 💡 warn: Variable `AnotherValue` should have snake_case name, e.g. `another_value`
}
"#,
);
@@ -197,10 +203,10 @@ fn foo() {
check_diagnostics(
r#"
struct non_camel_case_name {}
- // ^^^^^^^^^^^^^^^^^^^ 💡 weak: Structure `non_camel_case_name` should have CamelCase name, e.g. `NonCamelCaseName`
+ // ^^^^^^^^^^^^^^^^^^^ 💡 warn: Structure `non_camel_case_name` should have CamelCase name, e.g. `NonCamelCaseName`
struct SCREAMING_CASE {}
- // ^^^^^^^^^^^^^^ 💡 weak: Structure `SCREAMING_CASE` should have CamelCase name, e.g. `ScreamingCase`
+ // ^^^^^^^^^^^^^^ 💡 warn: Structure `SCREAMING_CASE` should have CamelCase name, e.g. `ScreamingCase`
"#,
);
}
@@ -219,7 +225,7 @@ struct AABB {}
check_diagnostics(
r#"
struct SomeStruct { SomeField: u8 }
- // ^^^^^^^^^ 💡 weak: Field `SomeField` should have snake_case name, e.g. `some_field`
+ // ^^^^^^^^^ 💡 warn: Field `SomeField` should have snake_case name, e.g. `some_field`
"#,
);
}
@@ -229,10 +235,10 @@ struct SomeStruct { SomeField: u8 }
check_diagnostics(
r#"
enum some_enum { Val(u8) }
- // ^^^^^^^^^ 💡 weak: Enum `some_enum` should have CamelCase name, e.g. `SomeEnum`
+ // ^^^^^^^^^ 💡 warn: Enum `some_enum` should have CamelCase name, e.g. `SomeEnum`
enum SOME_ENUM {}
- // ^^^^^^^^^ 💡 weak: Enum `SOME_ENUM` should have CamelCase name, e.g. `SomeEnum`
+ // ^^^^^^^^^ 💡 warn: Enum `SOME_ENUM` should have CamelCase name, e.g. `SomeEnum`
"#,
);
}
@@ -251,7 +257,7 @@ enum AABB {}
check_diagnostics(
r#"
enum SomeEnum { SOME_VARIANT(u8) }
- // ^^^^^^^^^^^^ 💡 weak: Variant `SOME_VARIANT` should have CamelCase name, e.g. `SomeVariant`
+ // ^^^^^^^^^^^^ 💡 warn: Variant `SOME_VARIANT` should have CamelCase name, e.g. `SomeVariant`
"#,
);
}
@@ -261,7 +267,7 @@ enum SomeEnum { SOME_VARIANT(u8) }
check_diagnostics(
r#"
const some_weird_const: u8 = 10;
- // ^^^^^^^^^^^^^^^^ 💡 weak: Constant `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST`
+ // ^^^^^^^^^^^^^^^^ 💡 warn: Constant `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST`
"#,
);
}
@@ -271,7 +277,7 @@ const some_weird_const: u8 = 10;
check_diagnostics(
r#"
static some_weird_const: u8 = 10;
- // ^^^^^^^^^^^^^^^^ 💡 weak: Static variable `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST`
+ // ^^^^^^^^^^^^^^^^ 💡 warn: Static variable `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST`
"#,
);
}
@@ -281,13 +287,13 @@ static some_weird_const: u8 = 10;
check_diagnostics(
r#"
struct someStruct;
- // ^^^^^^^^^^ 💡 weak: Structure `someStruct` should have CamelCase name, e.g. `SomeStruct`
+ // ^^^^^^^^^^ 💡 warn: Structure `someStruct` should have CamelCase name, e.g. `SomeStruct`
impl someStruct {
fn SomeFunc(&self) {
- // ^^^^^^^^ 💡 weak: Function `SomeFunc` should have snake_case name, e.g. `some_func`
+ // ^^^^^^^^ 💡 warn: Function `SomeFunc` should have snake_case name, e.g. `some_func`
let WHY_VAR_IS_CAPS = 10;
- // ^^^^^^^^^^^^^^^ 💡 weak: Variable `WHY_VAR_IS_CAPS` should have snake_case name, e.g. `why_var_is_caps`
+ // ^^^^^^^^^^^^^^^ 💡 warn: Variable `WHY_VAR_IS_CAPS` should have snake_case name, e.g. `why_var_is_caps`
}
}
"#,
@@ -319,7 +325,7 @@ enum Option { Some, None }
fn main() {
match Option::None {
SOME_VAR @ None => (),
- // ^^^^^^^^ 💡 weak: Variable `SOME_VAR` should have snake_case name, e.g. `some_var`
+ // ^^^^^^^^ 💡 warn: Variable `SOME_VAR` should have snake_case name, e.g. `some_var`
Some => (),
}
}
@@ -461,7 +467,7 @@ mod CheckNonstandardStyle {
#[allow(bad_style)]
mod CheckBadStyle {
- fn HiImABadFnName() {}
+ struct fooo;
}
mod F {
@@ -483,4 +489,156 @@ pub static SomeStatic: u8 = 10;
"#,
);
}
+
+ #[test]
+ fn deny_attributes() {
+ check_diagnostics(
+ r#"
+#[deny(non_snake_case)]
+fn NonSnakeCaseName(some_var: u8) -> u8 {
+ //^^^^^^^^^^^^^^^^ 💡 error: Function `NonSnakeCaseName` should have snake_case name, e.g. `non_snake_case_name`
+ // cov_flags generated output from elsewhere in this file
+ extern "C" {
+ #[no_mangle]
+ static lower_case: u8;
+ }
+
+ let OtherVar = some_var + 1;
+ //^^^^^^^^ 💡 error: Variable `OtherVar` should have snake_case name, e.g. `other_var`
+ OtherVar
+}
+
+#[deny(nonstandard_style)]
+mod CheckNonstandardStyle {
+ fn HiImABadFnName() {}
+ //^^^^^^^^^^^^^^ 💡 error: Function `HiImABadFnName` should have snake_case name, e.g. `hi_im_abad_fn_name`
+}
+
+#[deny(warnings)]
+mod CheckBadStyle {
+ struct fooo;
+ //^^^^ 💡 error: Structure `fooo` should have CamelCase name, e.g. `Fooo`
+}
+
+mod F {
+ #![deny(non_snake_case)]
+ fn CheckItWorksWithModAttr() {}
+ //^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: Function `CheckItWorksWithModAttr` should have snake_case name, e.g. `check_it_works_with_mod_attr`
+}
+
+#[deny(non_snake_case, non_camel_case_types)]
+pub struct some_type {
+ //^^^^^^^^^ 💡 error: Structure `some_type` should have CamelCase name, e.g. `SomeType`
+ SOME_FIELD: u8,
+ //^^^^^^^^^^ 💡 error: Field `SOME_FIELD` should have snake_case name, e.g. `some_field`
+ SomeField: u16,
+ //^^^^^^^^^ 💡 error: Field `SomeField` should have snake_case name, e.g. `some_field`
+}
+
+#[deny(non_upper_case_globals)]
+pub const some_const: u8 = 10;
+ //^^^^^^^^^^ 💡 error: Constant `some_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_CONST`
+
+#[deny(non_upper_case_globals)]
+pub static SomeStatic: u8 = 10;
+ //^^^^^^^^^^ 💡 error: Static variable `SomeStatic` should have UPPER_SNAKE_CASE name, e.g. `SOME_STATIC`
+ "#,
+ );
+ }
+
+ #[test]
+ fn fn_inner_items() {
+ check_diagnostics(
+ r#"
+fn main() {
+ const foo: bool = true;
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ static bar: bool = true;
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ fn BAZ() {
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+ const foo: bool = true;
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ static bar: bool = true;
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ fn BAZ() {
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+ let INNER_INNER = 42;
+ //^^^^^^^^^^^ 💡 warn: Variable `INNER_INNER` should have snake_case name, e.g. `inner_inner`
+ }
+
+ let INNER_LOCAL = 42;
+ //^^^^^^^^^^^ 💡 warn: Variable `INNER_LOCAL` should have snake_case name, e.g. `inner_local`
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn const_body_inner_items() {
+ check_diagnostics(
+ r#"
+const _: () = {
+ static bar: bool = true;
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ fn BAZ() {}
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+
+ const foo: () = {
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ const foo: bool = true;
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ static bar: bool = true;
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ fn BAZ() {}
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+ };
+};
+"#,
+ );
+ }
+
+ #[test]
+ fn static_body_inner_items() {
+ check_diagnostics(
+ r#"
+static FOO: () = {
+ const foo: bool = true;
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ fn BAZ() {}
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+
+ static bar: () = {
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ const foo: bool = true;
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ static bar: bool = true;
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ fn BAZ() {}
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+ };
+};
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_variant_body_inner_item() {
+ check_diagnostics(
+ r#"
+enum E {
+ A = {
+ const foo: bool = true;
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ static bar: bool = true;
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ fn BAZ() {}
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+ 42
+ },
+}
+"#,
+ );
+ }
}
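
The rewritten handler above stops hard-coding a severity and instead files the report under the rustc lint that owns each case style, which is why the test annotations change from `weak:` to `warn:` and why `#[deny(...)]` now yields `error:`. A standalone sketch of the mapping and of how an attribute level would override the default lint level (the enums are illustrative, not the rust-analyzer types):

    #[derive(Debug)]
    enum CaseType {
        LowerSnakeCase,
        UpperSnakeCase,
        UpperCamelCase,
    }

    #[derive(Debug, Clone, Copy, PartialEq)]
    enum Severity {
        Error,
        Warning,
        Allow,
    }

    // Map the expected case to the rustc lint that governs it, as in the diff.
    fn lint_for(case: &CaseType) -> &'static str {
        match case {
            CaseType::LowerSnakeCase => "non_snake_case",
            CaseType::UpperSnakeCase => "non_upper_case_globals",
            CaseType::UpperCamelCase => "non_camel_case_types",
        }
    }

    // Resolve the effective severity: an explicit attribute wins, otherwise lints warn by default.
    fn effective_severity(attr: Option<&str>) -> Severity {
        match attr {
            Some("deny") => Severity::Error,
            Some("allow") => Severity::Allow,
            _ => Severity::Warning,
        }
    }

    fn main() {
        for case in [CaseType::LowerSnakeCase, CaseType::UpperSnakeCase, CaseType::UpperCamelCase] {
            println!("{case:?} -> {}", lint_for(&case));
        }
        assert_eq!(effective_severity(Some("deny")), Severity::Error);
        assert_eq!(effective_severity(Some("allow")), Severity::Allow);
        assert_eq!(effective_severity(None), Severity::Warning);
    }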
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
index c779266bc..1ec17952b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: invalid-derive-target
//
@@ -11,11 +11,10 @@ pub(crate) fn invalid_derive_target(
let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
Diagnostic::new(
- "invalid-derive-target",
+ DiagnosticCode::RustcHardError("E0774"),
"`derive` may only be applied to `struct`s, `enum`s and `union`s",
display_range,
)
- .severity(Severity::Error)
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
index 04ce1e0fe..a337e2660 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
@@ -17,7 +17,7 @@ use syntax::{
};
use text_edit::TextEdit;
-use crate::{fix, Diagnostic, DiagnosticsConfig, Severity};
+use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsConfig, Severity};
#[derive(Default)]
struct State {
@@ -117,11 +117,10 @@ pub(crate) fn json_in_items(
edit.insert(range.start(), state.result);
acc.push(
Diagnostic::new(
- "json-is-not-rust",
+ DiagnosticCode::Ra("json-is-not-rust", Severity::WeakWarning),
"JSON syntax is not valid as a Rust item",
range,
)
- .severity(Severity::WeakWarning)
.with_fixes(Some(vec![{
let mut scb = SourceChangeBuilder::new(file_id);
let scope = match import_scope {
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
index 7547779a9..f54cdd63b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
// Diagnostic: macro-error
//
@@ -6,7 +6,12 @@ use crate::{Diagnostic, DiagnosticsContext};
pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic {
// Use more accurate position if available.
let display_range = ctx.resolve_precise_location(&d.node, d.precise_location);
- Diagnostic::new("macro-error", d.message.clone(), display_range).experimental()
+ Diagnostic::new(
+ DiagnosticCode::Ra("macro-error", Severity::Error),
+ d.message.clone(),
+ display_range,
+ )
+ .experimental()
}
// Diagnostic: macro-error
@@ -16,7 +21,12 @@ pub(crate) fn macro_def_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroDefErr
// Use more accurate position if available.
let display_range =
ctx.resolve_precise_location(&d.node.clone().map(|it| it.syntax_node_ptr()), d.name);
- Diagnostic::new("macro-def-error", d.message.clone(), display_range).experimental()
+ Diagnostic::new(
+ DiagnosticCode::Ra("macro-def-error", Severity::Error),
+ d.message.clone(),
+ display_range,
+ )
+ .experimental()
}
#[cfg(test)]
@@ -41,6 +51,9 @@ macro_rules! compile_error { () => {} }
compile_error!("compile_error macro works");
//^^^^^^^^^^^^^ error: compile_error macro works
+
+ compile_error! { "compile_error macro braced works" }
+//^^^^^^^^^^^^^ error: compile_error macro braced works
"#,
);
}
@@ -67,7 +80,7 @@ macro_rules! m {
fn f() {
m!();
- //^^^^ error: unresolved macro `$crate::private::concat!`
+ //^^^^ error: unresolved macro $crate::private::concat
}
//- /core.rs crate:core
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
index cd48bdba0..fc57dde69 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: malformed-derive
//
@@ -10,11 +10,10 @@ pub(crate) fn malformed_derive(
let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
Diagnostic::new(
- "malformed-derive",
+ DiagnosticCode::RustcHardError("E0777"),
"malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`",
display_range,
)
- .severity(Severity::Error)
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
index c5db8c374..6238c7e09 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
@@ -3,7 +3,7 @@ use syntax::{
AstNode, TextRange,
};
-use crate::{adjusted_display_range, Diagnostic, DiagnosticsContext};
+use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: mismatched-arg-count
//
@@ -14,7 +14,7 @@ pub(crate) fn mismatched_arg_count(
) -> Diagnostic {
let s = if d.expected == 1 { "" } else { "s" };
let message = format!("expected {} argument{s}, found {}", d.expected, d.found);
- Diagnostic::new("mismatched-arg-count", message, invalid_args_range(ctx, d))
+ Diagnostic::new(DiagnosticCode::RustcHardError("E0107"), message, invalid_args_range(ctx, d))
}
fn invalid_args_range(ctx: &DiagnosticsContext<'_>, d: &hir::MismatchedArgCount) -> TextRange {
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 60ccc41df..acc31cd11 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -15,7 +15,7 @@ use syntax::{
};
use text_edit::TextEdit;
-use crate::{fix, Diagnostic, DiagnosticsContext};
+use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: missing-fields
//
@@ -42,7 +42,7 @@ pub(crate) fn missing_fields(ctx: &DiagnosticsContext<'_>, d: &hir::MissingField
.unwrap_or_else(|| d.field_list_parent.clone().either(|it| it.into(), |it| it.into())),
);
- Diagnostic::new("missing-fields", message, ctx.sema.diagnostics_display_range(ptr).range)
+ Diagnostic::new_with_syntax_node_ptr(ctx, DiagnosticCode::RustcHardError("E0063"), message, ptr)
.with_fixes(fixes(ctx, d))
}
@@ -208,7 +208,7 @@ fn get_default_constructor(
}
let krate = ctx.sema.to_module_def(d.file.original_file(ctx.sema.db))?.krate();
- let module = krate.root_module(ctx.sema.db);
+ let module = krate.root_module();
// Look for a ::new() associated function
let has_new_func = ty
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
index 3f13b97a4..82a9a3bd5 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: missing-match-arm
//
@@ -7,10 +7,11 @@ pub(crate) fn missing_match_arms(
ctx: &DiagnosticsContext<'_>,
d: &hir::MissingMatchArms,
) -> Diagnostic {
- Diagnostic::new(
- "missing-match-arm",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0004"),
format!("missing match arm: {}", d.uncovered_patterns),
- ctx.sema.diagnostics_display_range(d.scrutinee_expr.clone().map(Into::into)).range,
+ d.scrutinee_expr.clone().map(Into::into),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index 2026b6fce..70b26009b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -4,16 +4,17 @@ use syntax::{ast, SyntaxNode};
use syntax::{match_ast, AstNode};
use text_edit::TextEdit;
-use crate::{fix, Diagnostic, DiagnosticsContext};
+use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: missing-unsafe
//
// This diagnostic is triggered if an operation marked as `unsafe` is used outside of an `unsafe` function or block.
pub(crate) fn missing_unsafe(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Diagnostic {
- Diagnostic::new(
- "missing-unsafe",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0133"),
"this operation is unsafe and requires an unsafe function or block",
- ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ d.expr.clone().map(|it| it.into()),
)
.with_fixes(fixes(ctx, d))
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
index 32e321107..3aa4aa970 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
@@ -1,14 +1,15 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
use hir::HirDisplay;
// Diagnostic: moved-out-of-ref
//
// This diagnostic is triggered on moving non-Copy values out of references.
pub(crate) fn moved_out_of_ref(ctx: &DiagnosticsContext<'_>, d: &hir::MovedOutOfRef) -> Diagnostic {
- Diagnostic::new(
- "moved-out-of-ref",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0507"),
format!("cannot move `{}` out of reference", d.ty.display(ctx.sema.db)),
- ctx.sema.diagnostics_display_range(d.span.clone()).range,
+ d.span.clone(),
)
.experimental() // spans are broken, and I'm not sure how precise we can detect copy types
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
index f61460e31..e0c3bedce 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
@@ -2,7 +2,7 @@ use ide_db::source_change::SourceChange;
use syntax::{AstNode, SyntaxKind, SyntaxNode, SyntaxToken, T};
use text_edit::TextEdit;
-use crate::{fix, Diagnostic, DiagnosticsContext, Severity};
+use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: need-mut
//
@@ -29,13 +29,15 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Diagno
use_range,
)])
})();
- Diagnostic::new(
- "need-mut",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ // FIXME: `E0384` is not the only error that this diagnostic handles
+ DiagnosticCode::RustcHardError("E0384"),
format!(
"cannot mutate immutable variable `{}`",
d.local.name(ctx.sema.db).display(ctx.sema.db)
),
- ctx.sema.diagnostics_display_range(d.span.clone()).range,
+ d.span.clone(),
)
.with_fixes(fixes)
}
@@ -68,12 +70,12 @@ pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Di
)])
})();
let ast = d.local.primary_source(ctx.sema.db).syntax_ptr();
- Diagnostic::new(
- "unused-mut",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcLint("unused_mut"),
"variable does not need to be mutable",
- ctx.sema.diagnostics_display_range(ast).range,
+ ast,
)
- .severity(Severity::WeakWarning)
.experimental() // Not supporting `#[allow(unused_mut)]` leads to false positive.
.with_fixes(fixes)
}
@@ -93,7 +95,7 @@ mod tests {
fn f(_: i32) {}
fn main() {
let mut x = 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
f(x);
}
"#,
@@ -268,7 +270,7 @@ fn main() {
fn f(_: i32) {}
fn main() {
let mut x = (2, 7);
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
f(x.1);
}
"#,
@@ -302,7 +304,7 @@ fn main() {
r#"
fn main() {
let mut x = &mut 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
*x = 5;
}
"#,
@@ -346,7 +348,7 @@ fn main() {
r#"
//- minicore: copy, builtin_impls
fn clone(mut i: &!) -> ! {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
*i
}
"#,
@@ -360,7 +362,7 @@ fn main() {
//- minicore: option
fn main() {
let mut v = &mut Some(2);
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let _ = || match v {
Some(k) => {
*k = 5;
@@ -386,7 +388,7 @@ fn main() {
fn main() {
match (2, 3) {
(x, mut y) => {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
x = 7;
//^^^^^ 💡 error: cannot mutate immutable variable `x`
}
@@ -407,7 +409,7 @@ fn main() {
fn main() {
return;
let mut x = 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
&mut x;
}
"#,
@@ -417,7 +419,7 @@ fn main() {
fn main() {
loop {}
let mut x = 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
&mut x;
}
"#,
@@ -438,7 +440,7 @@ fn main(b: bool) {
g();
}
let mut x = 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
&mut x;
}
"#,
@@ -452,7 +454,7 @@ fn main(b: bool) {
return;
}
let mut x = 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
&mut x;
}
"#,
@@ -466,7 +468,7 @@ fn main(b: bool) {
fn f(_: i32) {}
fn main() {
let mut x;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
x = 5;
f(x);
}
@@ -477,7 +479,7 @@ fn main() {
fn f(_: i32) {}
fn main(b: bool) {
let mut x;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
if b {
x = 1;
} else {
@@ -552,15 +554,15 @@ fn f(_: i32) {}
fn main() {
loop {
let mut x = 1;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
f(x);
if let mut y = 2 {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
f(y);
}
match 3 {
mut z => f(z),
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
}
}
}
@@ -577,9 +579,9 @@ fn main() {
loop {
let c @ (
mut b,
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
mut d
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
);
a = 1;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
@@ -597,7 +599,7 @@ fn main() {
check_diagnostics(
r#"
fn f(mut x: i32) {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
}
"#,
);
@@ -640,7 +642,7 @@ fn f() {
//- minicore: iterators, copy
fn f(x: [(i32, u8); 10]) {
for (a, mut b) in x {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
a = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
}
@@ -657,9 +659,9 @@ fn f(x: [(i32, u8); 10]) {
fn f(x: [(i32, u8); 10]) {
let mut it = x.into_iter();
while let Some((a, mut b)) = it.next() {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
while let Some((c, mut d)) = it.next() {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
a = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
c = 2;
@@ -683,7 +685,7 @@ fn f() {
let x = &mut x;
//^^^^^^ 💡 error: cannot mutate immutable variable `x`
let mut x = x;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
x[2] = 5;
}
"#,
@@ -711,13 +713,13 @@ impl IndexMut<usize> for Foo {
}
fn f() {
let mut x = Foo;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let y = &x[2];
let x = Foo;
let y = &mut x[2];
//^💡 error: cannot mutate immutable variable `x`
let mut x = &mut Foo;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let y: &mut (i32, u8) = &mut x[2];
let x = Foo;
let ref mut y = x[7];
@@ -731,7 +733,7 @@ fn f() {
}
let mut x = Foo;
let mut i = 5;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let y = &mut x[i];
}
"#,
@@ -759,7 +761,7 @@ impl DerefMut for Foo {
}
fn f() {
let mut x = Foo;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let y = &*x;
let x = Foo;
let y = &mut *x;
@@ -790,11 +792,27 @@ fn f() {
fn f(_: i32) {}
fn main() {
let ((Some(mut x), None) | (_, Some(mut x))) = (None, Some(7));
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
f(x);
}
"#,
);
+ check_diagnostics(
+ r#"
+struct Foo(i32);
+
+const X: Foo = Foo(5);
+const Y: Foo = Foo(12);
+
+const fn f(mut a: Foo) -> bool {
+ //^^^^^ 💡 warn: variable does not need to be mutable
+ match a {
+ X | Y => true,
+ _ => false,
+ }
+}
+"#,
+ );
}
#[test]
@@ -842,7 +860,7 @@ pub struct TreeLeaf {
pub fn test() {
let mut tree = Tree::Leaf(
- //^^^^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^^^^ 💡 warn: variable does not need to be mutable
TreeLeaf {
depth: 0,
data: 0
@@ -859,7 +877,7 @@ pub fn test() {
r#"
//- minicore: fn
fn fn_ref(mut x: impl Fn(u8) -> u8) -> u8 {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
x(2)
}
fn fn_mut(x: impl FnMut(u8) -> u8) -> u8 {
@@ -867,11 +885,11 @@ fn fn_mut(x: impl FnMut(u8) -> u8) -> u8 {
//^ 💡 error: cannot mutate immutable variable `x`
}
fn fn_borrow_mut(mut x: &mut impl FnMut(u8) -> u8) -> u8 {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
x(2)
}
fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
x(2)
}
"#,
@@ -915,14 +933,14 @@ fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 {
//- minicore: copy, fn
fn f() {
let mut x = 5;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let mut y = 2;
y = 7;
let closure = || {
let mut z = 8;
z = 3;
let mut k = z;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
};
}
"#,
@@ -949,7 +967,7 @@ fn f() {
fn f() {
struct X;
let mut x = X;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let c1 = || x;
let mut x = X;
let c2 = || { x = X; x };
@@ -965,12 +983,12 @@ fn f() {
fn f() {
let mut x = &mut 5;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let closure1 = || { *x = 2; };
let _ = closure1();
//^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1`
let mut x = &mut 5;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let closure1 = || { *x = 2; &x; };
let _ = closure1();
//^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1`
@@ -979,12 +997,12 @@ fn f() {
let _ = closure1();
//^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1`
let mut x = &mut 5;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let closure1 = move || { *x = 2; };
let _ = closure1();
//^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1`
let mut x = &mut X(1, 2);
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let closure1 = || { x.0 = 2; };
let _ = closure1();
//^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1`
@@ -1001,7 +1019,7 @@ fn f() {
fn x(t: &[u8]) {
match t {
&[a, mut b] | &[a, _, mut b] => {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
a = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
@@ -1055,7 +1073,7 @@ fn f() {
*x = 7;
//^^^^^^ 💡 error: cannot mutate immutable variable `x`
let mut y = Box::new(5);
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
*x = *y;
//^^^^^^^ 💡 error: cannot mutate immutable variable `x`
let x = Box::new(5);
@@ -1067,6 +1085,33 @@ fn f() {
}
#[test]
+ fn regression_15143() {
+ check_diagnostics(
+ r#"
+ trait Tr {
+ type Ty;
+ }
+
+ struct A;
+
+ impl Tr for A {
+ type Ty = (u32, i64);
+ }
+
+ struct B<T: Tr> {
+ f: <T as Tr>::Ty,
+ }
+
+ fn main(b: B<A>) {
+ let f = b.f.0;
+ f = 5;
+ //^^^^^ 💡 error: cannot mutate immutable variable `f`
+ }
+ "#,
+ );
+ }
+
+ #[test]
fn allow_unused_mut_for_identifiers_starting_with_underline() {
check_diagnostics(
r#"
@@ -1080,17 +1125,51 @@ fn main() {
}
#[test]
- fn respect_allow_unused_mut() {
- // FIXME: respect
+ fn respect_lint_attributes_for_unused_mut() {
check_diagnostics(
r#"
fn f(_: i32) {}
fn main() {
#[allow(unused_mut)]
let mut x = 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
f(x);
}
+
+fn main2() {
+ #[deny(unused_mut)]
+ let mut x = 2;
+ //^^^^^ 💡 error: variable does not need to be mutable
+ f(x);
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+macro_rules! mac {
+ ($($x:expr),*$(,)*) => ({
+ #[allow(unused_mut)]
+ let mut vec = 2;
+ vec
+ });
+}
+
+fn main2() {
+ let mut x = mac![];
+ //^^^^^ 💡 warn: variable does not need to be mutable
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn regression_15099() {
+ check_diagnostics(
+ r#"
+//- minicore: iterator, range
+fn f() {
+ loop {}
+ for _ in 0..2 {}
+}
"#,
);
}
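
Because `unused-mut` is now reported as the `unused_mut` rustc lint, it follows the usual lint-level rules, which the new `respect_lint_attributes_for_unused_mut` fixture exercises. For comparison, plain rustc behaves the same way on this sketch (assumed to be compiled as an ordinary binary):

    fn f(_: i32) {}

    fn main() {
        #[allow(unused_mut)]
        let mut a = 2; // silenced: no unused_mut report here
        f(a);

        let mut b = 3; // warn by default: variable does not need to be mutable
        f(b);

        // Replacing the attribute with #[deny(unused_mut)] turns the same report
        // into a hard error, matching the `error:` annotation in the fixture above.
    }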
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
index a39eceab2..a34a5824f 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -6,16 +6,17 @@ use syntax::{
};
use text_edit::TextEdit;
-use crate::{fix, Assist, Diagnostic, DiagnosticsContext};
+use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: no-such-field
//
// This diagnostic is triggered if the created structure does not have the field provided in the record expression.
pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Diagnostic {
- Diagnostic::new(
- "no-such-field",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0559"),
"no such field",
- ctx.sema.diagnostics_display_range(d.field.clone().map(|it| it.into())).range,
+ d.field.clone().map(|it| it.into()),
)
.with_fixes(fixes(ctx, d))
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
index 4cd85a479..c44d28e77 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
@@ -1,6 +1,6 @@
use either::Either;
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: private-assoc-item
//
@@ -16,8 +16,9 @@ pub(crate) fn private_assoc_item(
.name(ctx.sema.db)
.map(|name| format!("`{}` ", name.display(ctx.sema.db)))
.unwrap_or_default();
- Diagnostic::new(
- "private-assoc-item",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0624"),
format!(
"{} {}is private",
match d.item {
@@ -27,15 +28,13 @@ pub(crate) fn private_assoc_item(
},
name,
),
- ctx.sema
- .diagnostics_display_range(d.expr_or_pat.clone().map(|it| match it {
+ d.expr_or_pat.clone().map(|it| match it {
+ Either::Left(it) => it.into(),
+ Either::Right(it) => match it {
Either::Left(it) => it.into(),
- Either::Right(it) => match it {
- Either::Left(it) => it.into(),
- Either::Right(it) => it.into(),
- },
- }))
- .range,
+ Either::Right(it) => it.into(),
+ },
+ }),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs
index de7f51f69..553defcf9 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs
@@ -1,18 +1,19 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: private-field
//
// This diagnostic is triggered if the accessed field is not visible from the current module.
pub(crate) fn private_field(ctx: &DiagnosticsContext<'_>, d: &hir::PrivateField) -> Diagnostic {
// FIXME: add quickfix
- Diagnostic::new(
- "private-field",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0616"),
format!(
"field `{}` of `{}` is private",
d.field.name(ctx.sema.db).display(ctx.sema.db),
d.field.parent_def(ctx.sema.db).name(ctx.sema.db).display(ctx.sema.db)
),
- ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ d.expr.clone().map(|it| it.into()),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
index d3eda3c5e..083ef3e8d 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
@@ -6,7 +6,7 @@ use syntax::{
};
use text_edit::TextEdit;
-use crate::{fix, Assist, Diagnostic, DiagnosticsContext, Severity};
+use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: replace-filter-map-next-with-find-map
//
@@ -15,12 +15,12 @@ pub(crate) fn replace_filter_map_next_with_find_map(
ctx: &DiagnosticsContext<'_>,
d: &hir::ReplaceFilterMapNextWithFindMap,
) -> Diagnostic {
- Diagnostic::new(
- "replace-filter-map-next-with-find-map",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::Clippy("filter_map_next"),
"replace filter_map(..).next() with find_map(..)",
- ctx.sema.diagnostics_display_range(InFile::new(d.file, d.next_expr.clone().into())).range,
+ InFile::new(d.file, d.next_expr.clone().into()),
)
- .severity(Severity::WeakWarning)
.with_fixes(fixes(ctx, d))
}
@@ -64,7 +64,7 @@ mod tests {
pub(crate) fn check_diagnostics(ra_fixture: &str) {
let mut config = DiagnosticsConfig::test_sample();
config.disabled.insert("inactive-code".to_string());
- config.disabled.insert("unresolved-method".to_string());
+ config.disabled.insert("E0599".to_string());
check_diagnostics_with_config(config, ra_fixture)
}
@@ -139,4 +139,33 @@ fn foo() {
"#,
)
}
+
+ #[test]
+ fn respect_lint_attributes_for_clippy_equivalent() {
+ check_diagnostics(
+ r#"
+//- minicore: iterators
+
+fn foo() {
+ #[allow(clippy::filter_map_next)]
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+}
+
+#[deny(clippy::filter_map_next)]
+fn foo() {
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: replace filter_map(..).next() with find_map(..)
+
+fn foo() {
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
+
+#[warn(clippy::filter_map_next)]
+fn foo() {
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 warn: replace filter_map(..).next() with find_map(..)
+
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index c28f98d83..15bd28c00 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -7,7 +7,7 @@ use syntax::{
};
use text_edit::TextEdit;
-use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticsContext};
+use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: type-mismatch
//
@@ -39,7 +39,7 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch)
}
};
let mut diag = Diagnostic::new(
- "type-mismatch",
+ DiagnosticCode::RustcHardError("E0308"),
format!(
"expected {}, found {}",
d.expected.display(ctx.sema.db).with_closure_style(ClosureStyle::ClosureWithId),
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
index e12bbcf68..4af672271 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
@@ -7,7 +7,7 @@ use ide_db::{
use syntax::AstNode;
use text_edit::TextEdit;
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: typed-hole
//
@@ -26,7 +26,8 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di
)
};
- Diagnostic::new("typed-hole", message, display_range.range).with_fixes(fixes)
+ Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range.range)
+ .with_fixes(fixes)
}
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>> {
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs
index 034e4fcfb..7de9a9a32 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: undeclared-label
pub(crate) fn undeclared_label(
@@ -6,10 +6,11 @@ pub(crate) fn undeclared_label(
d: &hir::UndeclaredLabel,
) -> Diagnostic {
let name = &d.name;
- Diagnostic::new(
- "undeclared-label",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("undeclared-label"),
format!("use of undeclared label `{}`", name.display(ctx.sema.db)),
- ctx.sema.diagnostics_display_range(d.node.clone().map(|it| it.into())).range,
+ d.node.clone().map(|it| it.into()),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs
index e879de75c..bcce72a7d 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
// Diagnostic: unimplemented-builtin-macro
//
@@ -7,10 +7,10 @@ pub(crate) fn unimplemented_builtin_macro(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnimplementedBuiltinMacro,
) -> Diagnostic {
- Diagnostic::new(
- "unimplemented-builtin-macro",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::Ra("unimplemented-builtin-macro", Severity::WeakWarning),
"unimplemented built-in macro".to_string(),
- ctx.sema.diagnostics_display_range(d.node.clone()).range,
+ d.node.clone(),
)
- .severity(Severity::WeakWarning)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index 271e7ce73..e04f27c27 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -14,7 +14,7 @@ use syntax::{
};
use text_edit::TextEdit;
-use crate::{fix, Assist, Diagnostic, DiagnosticsContext, Severity};
+use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
// Diagnostic: unlinked-file
//
@@ -46,8 +46,7 @@ pub(crate) fn unlinked_file(
.unwrap_or(range);
acc.push(
- Diagnostic::new("unlinked-file", message, range)
- .severity(Severity::WeakWarning)
+ Diagnostic::new(DiagnosticCode::Ra("unlinked-file", Severity::WeakWarning), message, range)
.with_fixes(fixes),
);
}
@@ -119,10 +118,11 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
stack.pop();
'crates: for &krate in ctx.sema.db.relevant_crates(parent_id).iter() {
let crate_def_map = ctx.sema.db.crate_def_map(krate);
- let Some((_, module)) =
- crate_def_map.modules()
- .find(|(_, module)| module.origin.file_id() == Some(parent_id) && !module.origin.is_inline())
- else { continue };
+ let Some((_, module)) = crate_def_map.modules().find(|(_, module)| {
+ module.origin.file_id() == Some(parent_id) && !module.origin.is_inline()
+ }) else {
+ continue;
+ };
if stack.is_empty() {
return make_fixes(
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs
index 9fedadeae..1c5d6cd09 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unreachable-label
pub(crate) fn unreachable_label(
@@ -6,10 +6,11 @@ pub(crate) fn unreachable_label(
d: &hir::UnreachableLabel,
) -> Diagnostic {
let name = &d.name;
- Diagnostic::new(
- "unreachable-label",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0767"),
format!("use of unreachable label `{}`", name.display(ctx.sema.db)),
- ctx.sema.diagnostics_display_range(d.node.clone().map(|it| it.into())).range,
+ d.node.clone().map(|it| it.into()),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
index 74e4a69c6..f8265b632 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-extern-crate
//
@@ -7,10 +7,11 @@ pub(crate) fn unresolved_extern_crate(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedExternCrate,
) -> Diagnostic {
- Diagnostic::new(
- "unresolved-extern-crate",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("unresolved-extern-crate"),
"unresolved extern crate",
- ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range,
+ d.decl.clone().map(|it| it.into()),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index 5e4efa41f..0758706e4 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -8,7 +8,7 @@ use ide_db::{
use syntax::{ast, AstNode, AstPtr};
use text_edit::TextEdit;
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-field
//
@@ -22,14 +22,15 @@ pub(crate) fn unresolved_field(
} else {
""
};
- Diagnostic::new(
- "unresolved-field",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0559"),
format!(
"no field `{}` on type `{}`{method_suffix}",
d.name.display(ctx.sema.db),
d.receiver.display(ctx.sema.db)
),
- ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ d.expr.clone().map(|it| it.into()),
)
.with_fixes(fixes(ctx, d))
.experimental()
@@ -67,7 +68,10 @@ fn method_fix(
}
#[cfg(test)]
mod tests {
- use crate::tests::check_diagnostics;
+ use crate::{
+ tests::{check_diagnostics, check_diagnostics_with_config},
+ DiagnosticsConfig,
+ };
#[test]
fn smoke_test() {
@@ -145,4 +149,11 @@ fn foo() {
"#,
);
}
+
+ #[test]
+ fn no_diagnostic_for_missing_name() {
+ let mut config = DiagnosticsConfig::test_sample();
+ config.disabled.insert("syntax-error".to_owned());
+ check_diagnostics_with_config(config, "fn foo() { (). }");
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs
index e52a88459..6b8026c03 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-import
//
@@ -8,10 +8,11 @@ pub(crate) fn unresolved_import(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedImport,
) -> Diagnostic {
- Diagnostic::new(
- "unresolved-import",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0432"),
"unresolved import",
- ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range,
+ d.decl.clone().map(|it| it.into()),
)
// This currently results in false positives in the following cases:
// - `cfg_if!`-generated code in libstd (we don't load the sysroot correctly)
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
index 3943b51ab..33e7c2e37 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-macro-call
//
@@ -12,7 +12,7 @@ pub(crate) fn unresolved_macro_call(
let display_range = ctx.resolve_precise_location(&d.macro_call, d.precise_location);
let bang = if d.is_bang { "!" } else { "" };
Diagnostic::new(
- "unresolved-macro-call",
+ DiagnosticCode::RustcHardError("unresolved-macro-call"),
format!("unresolved macro `{}{bang}`", d.path.display(ctx.sema.db)),
display_range,
)
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 8bbb837e6..ae9f6744c 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -8,7 +8,7 @@ use ide_db::{
use syntax::{ast, AstNode, TextRange};
use text_edit::TextEdit;
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-method
//
@@ -22,14 +22,15 @@ pub(crate) fn unresolved_method(
} else {
""
};
- Diagnostic::new(
- "unresolved-method",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0599"),
format!(
"no method `{}` on type `{}`{field_suffix}",
d.name.display(ctx.sema.db),
d.receiver.display(ctx.sema.db)
),
- ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ d.expr.clone().map(|it| it.into()),
)
.with_fixes(fixes(ctx, d))
.experimental()
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
index 6e3fd3b42..be24e50c9 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
@@ -3,7 +3,7 @@ use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSyste
use itertools::Itertools;
use syntax::AstNode;
-use crate::{fix, Diagnostic, DiagnosticsContext};
+use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-module
//
@@ -12,8 +12,9 @@ pub(crate) fn unresolved_module(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedModule,
) -> Diagnostic {
- Diagnostic::new(
- "unresolved-module",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0583"),
match &*d.candidates {
[] => "unresolved module".to_string(),
[candidate] => format!("unresolved module, can't find module file: {candidate}"),
@@ -25,7 +26,7 @@ pub(crate) fn unresolved_module(
)
}
},
- ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range,
+ d.decl.clone().map(|it| it.into()),
)
.with_fixes(fixes(ctx, d))
}
@@ -82,8 +83,8 @@ mod baz {}
expect![[r#"
[
Diagnostic {
- code: DiagnosticCode(
- "unresolved-module",
+ code: RustcHardError(
+ "E0583",
),
message: "unresolved module, can't find module file: foo.rs, or foo/mod.rs",
range: 0..8,
@@ -148,6 +149,22 @@ mod baz {}
},
],
),
+ main_node: Some(
+ InFile {
+ file_id: FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ value: MODULE@0..8
+ MOD_KW@0..3 "mod"
+ WHITESPACE@3..4 " "
+ NAME@4..7
+ IDENT@4..7 "foo"
+ SEMICOLON@7..8 ";"
+ ,
+ },
+ ),
},
]
"#]],
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
index ae5cf1358..015a3d6b2 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
@@ -1,6 +1,6 @@
use hir::db::DefDatabase;
-use crate::{Diagnostic, DiagnosticsContext, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
// Diagnostic: unresolved-proc-macro
//
@@ -41,5 +41,5 @@ pub(crate) fn unresolved_proc_macro(
};
let message = format!("{not_expanded_message}: {message}");
- Diagnostic::new("unresolved-proc-macro", message, display_range).severity(severity)
+ Diagnostic::new(DiagnosticCode::Ra("unresolved-proc-macro", severity), message, display_range)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
index 289ed0458..0aa439f79 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
@@ -1,9 +1,9 @@
use ide_db::{base_db::FileId, source_change::SourceChange};
use itertools::Itertools;
-use syntax::{ast, AstNode, SyntaxNode, TextRange};
+use syntax::{ast, AstNode, SyntaxNode};
use text_edit::TextEdit;
-use crate::{fix, Diagnostic, Severity};
+use crate::{fix, Diagnostic, DiagnosticCode};
// Diagnostic: unnecessary-braces
//
@@ -15,6 +15,11 @@ pub(crate) fn useless_braces(
) -> Option<()> {
let use_tree_list = ast::UseTreeList::cast(node.clone())?;
if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
+ // If there is a `self` inside the bracketed `use`, don't show diagnostic.
+ if single_use_tree.path()?.segment()?.self_token().is_some() {
+ return Some(());
+ }
+
// If there is a comment inside the bracketed `use`,
// assume it is a commented out module path and don't show diagnostic.
if use_tree_list.has_inner_comment() {
@@ -22,21 +27,18 @@ pub(crate) fn useless_braces(
}
let use_range = use_tree_list.syntax().text_range();
- let edit = remove_braces(&single_use_tree).unwrap_or_else(|| {
- let to_replace = single_use_tree.syntax().text().to_string();
- let mut edit_builder = TextEdit::builder();
- edit_builder.delete(use_range);
- edit_builder.insert(use_range.start(), to_replace);
- edit_builder.finish()
- });
+ let to_replace = single_use_tree.syntax().text().to_string();
+ let mut edit_builder = TextEdit::builder();
+ edit_builder.delete(use_range);
+ edit_builder.insert(use_range.start(), to_replace);
+ let edit = edit_builder.finish();
acc.push(
Diagnostic::new(
- "unnecessary-braces",
+ DiagnosticCode::RustcLint("unused_braces"),
"Unnecessary braces in use statement".to_string(),
use_range,
)
- .severity(Severity::WeakWarning)
.with_fixes(Some(vec![fix(
"remove_braces",
"Remove unnecessary braces",
@@ -49,19 +51,12 @@ pub(crate) fn useless_braces(
Some(())
}
-fn remove_braces(single_use_tree: &ast::UseTree) -> Option<TextEdit> {
- let use_tree_list_node = single_use_tree.syntax().parent()?;
- if single_use_tree.path()?.segment()?.self_token().is_some() {
- let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start();
- let end = use_tree_list_node.text_range().end();
- return Some(TextEdit::delete(TextRange::new(start, end)));
- }
- None
-}
-
#[cfg(test)]
mod tests {
- use crate::tests::{check_diagnostics, check_fix};
+ use crate::{
+ tests::{check_diagnostics, check_diagnostics_with_config, check_fix},
+ DiagnosticsConfig,
+ };
#[test]
fn test_check_unnecessary_braces_in_use_statement() {
@@ -94,6 +89,32 @@ mod a {
}
"#,
);
+ check_diagnostics(
+ r#"
+use a::{self};
+
+mod a {
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+use a::{self as cool_name};
+
+mod a {
+}
+"#,
+ );
+
+ let mut config = DiagnosticsConfig::test_sample();
+ config.disabled.insert("syntax-error".to_string());
+ check_diagnostics_with_config(
+ config,
+ r#"
+mod a { pub mod b {} }
+use a::{b::self};
+"#,
+ );
check_fix(
r#"
mod b {}
@@ -126,16 +147,6 @@ use a::c;
);
check_fix(
r#"
-mod a {}
-use a::{self$0};
-"#,
- r#"
-mod a {}
-use a;
-"#,
- );
- check_fix(
- r#"
mod a { pub mod c {} pub mod d { pub mod e {} } }
use a::{c, d::{e$0}};
"#,
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
index 55a4a482d..b1b9b4b8e 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
@@ -67,24 +67,61 @@ mod handlers {
#[cfg(test)]
mod tests;
+use std::collections::HashMap;
+
use hir::{diagnostics::AnyDiagnostic, InFile, Semantics};
use ide_db::{
assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
base_db::{FileId, FileRange, SourceDatabase},
+ generated::lints::{LintGroup, CLIPPY_LINT_GROUPS, DEFAULT_LINT_GROUPS},
imports::insert_use::InsertUseConfig,
label::Label,
source_change::SourceChange,
- FxHashSet, RootDatabase,
+ syntax_helpers::node_ext::parse_tt_as_comma_sep_paths,
+ FxHashMap, FxHashSet, RootDatabase,
+};
+use once_cell::sync::Lazy;
+use stdx::never;
+use syntax::{
+ algo::find_node_at_range,
+ ast::{self, AstNode},
+ SyntaxNode, SyntaxNodePtr, TextRange,
};
-use syntax::{algo::find_node_at_range, ast::AstNode, SyntaxNodePtr, TextRange};
// FIXME: Make this an enum
-#[derive(Copy, Clone, Debug, PartialEq)]
-pub struct DiagnosticCode(pub &'static str);
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum DiagnosticCode {
+ RustcHardError(&'static str),
+ RustcLint(&'static str),
+ Clippy(&'static str),
+ Ra(&'static str, Severity),
+}
impl DiagnosticCode {
- pub fn as_str(&self) -> &str {
- self.0
+ pub fn url(&self) -> String {
+ match self {
+ DiagnosticCode::RustcHardError(e) => {
+ format!("https://doc.rust-lang.org/stable/error_codes/{e}.html")
+ }
+ DiagnosticCode::RustcLint(e) => {
+ format!("https://doc.rust-lang.org/rustc/?search={e}")
+ }
+ DiagnosticCode::Clippy(e) => {
+ format!("https://rust-lang.github.io/rust-clippy/master/#/{e}")
+ }
+ DiagnosticCode::Ra(e, _) => {
+ format!("https://rust-analyzer.github.io/manual.html#{e}")
+ }
+ }
+ }
+
+ pub fn as_str(&self) -> &'static str {
+ match self {
+ DiagnosticCode::RustcHardError(r)
+ | DiagnosticCode::RustcLint(r)
+ | DiagnosticCode::Clippy(r)
+ | DiagnosticCode::Ra(r, _) => r,
+ }
}
}
@@ -97,29 +134,51 @@ pub struct Diagnostic {
pub unused: bool,
pub experimental: bool,
pub fixes: Option<Vec<Assist>>,
+ // The node that will be affected by `#[allow]` and similar attributes.
+ pub main_node: Option<InFile<SyntaxNode>>,
}
impl Diagnostic {
- fn new(code: &'static str, message: impl Into<String>, range: TextRange) -> Diagnostic {
+ fn new(code: DiagnosticCode, message: impl Into<String>, range: TextRange) -> Diagnostic {
let message = message.into();
Diagnostic {
- code: DiagnosticCode(code),
+ code,
message,
range,
- severity: Severity::Error,
+ severity: match code {
+ DiagnosticCode::RustcHardError(_) => Severity::Error,
+ // FIXME: Rustc lints are not always warnings, but the ones that are currently implemented are all warnings.
+ DiagnosticCode::RustcLint(_) => Severity::Warning,
+ // FIXME: We can make this configurable, and if the user uses `cargo clippy` on flycheck, we can
+ // make it a normal warning.
+ DiagnosticCode::Clippy(_) => Severity::WeakWarning,
+ DiagnosticCode::Ra(_, s) => s,
+ },
unused: false,
experimental: false,
fixes: None,
+ main_node: None,
}
}
+ fn new_with_syntax_node_ptr(
+ ctx: &DiagnosticsContext<'_>,
+ code: DiagnosticCode,
+ message: impl Into<String>,
+ node: InFile<SyntaxNodePtr>,
+ ) -> Diagnostic {
+ let file_id = node.file_id;
+ Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node.clone()).range)
+ .with_main_node(node.map(|x| x.to_node(&ctx.sema.parse_or_expand(file_id))))
+ }
+
fn experimental(mut self) -> Diagnostic {
self.experimental = true;
self
}
- fn severity(mut self, severity: Severity) -> Diagnostic {
- self.severity = severity;
+ fn with_main_node(mut self, main_node: InFile<SyntaxNode>) -> Diagnostic {
+ self.main_node = Some(main_node);
self
}
@@ -134,12 +193,12 @@ impl Diagnostic {
}
}
-#[derive(Debug, Copy, Clone)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Severity {
Error,
- // We don't actually emit this one yet, but we should at some point.
- // Warning,
+ Warning,
WeakWarning,
+ Allow,
}
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -155,6 +214,8 @@ impl Default for ExprFillDefaultMode {
#[derive(Debug, Clone)]
pub struct DiagnosticsConfig {
+ /// Whether native diagnostics are enabled.
+ pub enabled: bool,
pub proc_macros_enabled: bool,
pub proc_attr_macros_enabled: bool,
pub disable_experimental: bool,
@@ -171,6 +232,7 @@ impl DiagnosticsConfig {
use ide_db::imports::insert_use::ImportGranularity;
Self {
+ enabled: true,
proc_macros_enabled: Default::default(),
proc_attr_macros_enabled: Default::default(),
disable_experimental: Default::default(),
@@ -194,7 +256,7 @@ struct DiagnosticsContext<'a> {
resolve: &'a AssistResolveStrategy,
}
-impl<'a> DiagnosticsContext<'a> {
+impl DiagnosticsContext<'_> {
fn resolve_precise_location(
&self,
node: &InFile<SyntaxNodePtr>,
@@ -228,11 +290,13 @@ pub fn diagnostics(
let mut res = Vec::new();
// [#34344] Only take the first 128 errors to prevent slowing down the editor/IDE; the number 128 is chosen arbitrarily.
- res.extend(
- parse.errors().iter().take(128).map(|err| {
- Diagnostic::new("syntax-error", format!("Syntax Error: {err}"), err.range())
- }),
- );
+ res.extend(parse.errors().iter().take(128).map(|err| {
+ Diagnostic::new(
+ DiagnosticCode::RustcHardError("syntax-error"),
+ format!("Syntax Error: {err}"),
+ err.range(),
+ )
+ }));
let parse = sema.parse(file_id);
@@ -271,7 +335,7 @@ pub fn diagnostics(
res.extend(d.errors.iter().take(32).map(|err| {
{
Diagnostic::new(
- "syntax-error",
+ DiagnosticCode::RustcHardError("syntax-error"),
format!("Syntax Error in Expansion: {err}"),
ctx.resolve_precise_location(&d.node.clone(), d.precise_location),
)
@@ -309,14 +373,168 @@ pub fn diagnostics(
res.push(d)
}
+ let mut diagnostics_of_range =
+ res.iter_mut().filter_map(|x| Some((x.main_node.clone()?, x))).collect::<FxHashMap<_, _>>();
+
+ let mut rustc_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default();
+ let mut clippy_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default();
+
+ handle_lint_attributes(
+ &ctx.sema,
+ parse.syntax(),
+ &mut rustc_stack,
+ &mut clippy_stack,
+ &mut diagnostics_of_range,
+ );
+
res.retain(|d| {
- !ctx.config.disabled.contains(d.code.as_str())
+ d.severity != Severity::Allow
+ && !ctx.config.disabled.contains(d.code.as_str())
&& !(ctx.config.disable_experimental && d.experimental)
});
res
}
+// `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros
+
+static RUSTC_LINT_GROUPS_DICT: Lazy<HashMap<&str, Vec<&str>>> =
+ Lazy::new(|| build_group_dict(DEFAULT_LINT_GROUPS, &["warnings", "__RA_EVERY_LINT"], ""));
+
+static CLIPPY_LINT_GROUPS_DICT: Lazy<HashMap<&str, Vec<&str>>> =
+ Lazy::new(|| build_group_dict(CLIPPY_LINT_GROUPS, &["__RA_EVERY_LINT"], "clippy::"));
+
+fn build_group_dict(
+ lint_group: &'static [LintGroup],
+ all_groups: &'static [&'static str],
+ prefix: &'static str,
+) -> HashMap<&'static str, Vec<&'static str>> {
+ let mut r: HashMap<&str, Vec<&str>> = HashMap::new();
+ for g in lint_group {
+ for child in g.children {
+ r.entry(child.strip_prefix(prefix).unwrap())
+ .or_default()
+ .push(g.lint.label.strip_prefix(prefix).unwrap());
+ }
+ }
+ for (lint, groups) in r.iter_mut() {
+ groups.push(lint);
+ groups.extend_from_slice(all_groups);
+ }
+ r
+}
+
+fn handle_lint_attributes(
+ sema: &Semantics<'_, RootDatabase>,
+ root: &SyntaxNode,
+ rustc_stack: &mut FxHashMap<String, Vec<Severity>>,
+ clippy_stack: &mut FxHashMap<String, Vec<Severity>>,
+ diagnostics_of_range: &mut FxHashMap<InFile<SyntaxNode>, &mut Diagnostic>,
+) {
+ let file_id = sema.hir_file_for(root);
+ for ev in root.preorder() {
+ match ev {
+ syntax::WalkEvent::Enter(node) => {
+ for attr in node.children().filter_map(ast::Attr::cast) {
+ parse_lint_attribute(attr, rustc_stack, clippy_stack, |stack, severity| {
+ stack.push(severity);
+ });
+ }
+ if let Some(x) =
+ diagnostics_of_range.get_mut(&InFile { file_id, value: node.clone() })
+ {
+ const EMPTY_LINTS: &[&str] = &[];
+ let (names, stack) = match x.code {
+ DiagnosticCode::RustcLint(name) => (
+ RUSTC_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |x| &**x),
+ &mut *rustc_stack,
+ ),
+ DiagnosticCode::Clippy(name) => (
+ CLIPPY_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |x| &**x),
+ &mut *clippy_stack,
+ ),
+ _ => continue,
+ };
+ for &name in names {
+ if let Some(s) = stack.get(name).and_then(|x| x.last()) {
+ x.severity = *s;
+ }
+ }
+ }
+ if let Some(item) = ast::Item::cast(node.clone()) {
+ if let Some(me) = sema.expand_attr_macro(&item) {
+ for stack in [&mut *rustc_stack, &mut *clippy_stack] {
+ stack
+ .entry("__RA_EVERY_LINT".to_owned())
+ .or_default()
+ .push(Severity::Allow);
+ }
+ handle_lint_attributes(
+ sema,
+ &me,
+ rustc_stack,
+ clippy_stack,
+ diagnostics_of_range,
+ );
+ for stack in [&mut *rustc_stack, &mut *clippy_stack] {
+ stack.entry("__RA_EVERY_LINT".to_owned()).or_default().pop();
+ }
+ }
+ }
+ if let Some(mc) = ast::MacroCall::cast(node) {
+ if let Some(me) = sema.expand(&mc) {
+ handle_lint_attributes(
+ sema,
+ &me,
+ rustc_stack,
+ clippy_stack,
+ diagnostics_of_range,
+ );
+ }
+ }
+ }
+ syntax::WalkEvent::Leave(node) => {
+ for attr in node.children().filter_map(ast::Attr::cast) {
+ parse_lint_attribute(attr, rustc_stack, clippy_stack, |stack, severity| {
+ if stack.pop() != Some(severity) {
+ never!("Mismatched serevity in walking lint attributes");
+ }
+ });
+ }
+ }
+ }
+ }
+}
+
+fn parse_lint_attribute(
+ attr: ast::Attr,
+ rustc_stack: &mut FxHashMap<String, Vec<Severity>>,
+ clippy_stack: &mut FxHashMap<String, Vec<Severity>>,
+ job: impl Fn(&mut Vec<Severity>, Severity),
+) {
+ let Some((tag, args_tt)) = attr.as_simple_call() else {
+ return;
+ };
+ let severity = match tag.as_str() {
+ "allow" => Severity::Allow,
+ "warn" => Severity::Warning,
+ "forbid" | "deny" => Severity::Error,
+ _ => return,
+ };
+ for lint in parse_tt_as_comma_sep_paths(args_tt).into_iter().flatten() {
+ if let Some(lint) = lint.as_single_name_ref() {
+ job(rustc_stack.entry(lint.to_string()).or_default(), severity);
+ }
+ if let Some(tool) = lint.qualifier().and_then(|x| x.as_single_name_ref()) {
+ if let Some(name_ref) = &lint.segment().and_then(|x| x.name_ref()) {
+ if tool.to_string() == "clippy" {
+ job(clippy_stack.entry(name_ref.to_string()).or_default(), severity);
+ }
+ }
+ }
+ }
+}
+
fn fix(id: &'static str, label: &str, source_change: SourceChange, target: TextRange) -> Assist {
let mut res = unresolved_fix(id, label, target);
res.source_change = Some(source_change);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
index b5cd4e0d6..ee0e03549 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
@@ -49,8 +49,11 @@ fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
let file_id = *source_change.source_file_edits.keys().next().unwrap();
let mut actual = db.file_text(file_id).to_string();
- for edit in source_change.source_file_edits.values() {
+ for (edit, snippet_edit) in source_change.source_file_edits.values() {
edit.apply(&mut actual);
+ if let Some(snippet_edit) = snippet_edit {
+ snippet_edit.apply(&mut actual);
+ }
}
actual
};
@@ -114,6 +117,8 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur
annotation.push_str(match d.severity {
Severity::Error => "error",
Severity::WeakWarning => "weak",
+ Severity::Warning => "warn",
+ Severity::Allow => "allow",
});
annotation.push_str(": ");
annotation.push_str(&d.message);
@@ -130,14 +135,19 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur
)
}
}
- assert_eq!(expected, actual);
+ if expected != actual {
+ let fneg = expected.iter().filter(|x| !actual.contains(x)).collect::<Vec<_>>();
+ let fpos = actual.iter().filter(|x| !expected.contains(x)).collect::<Vec<_>>();
+
+ panic!("Diagnostic test failed.\nFalse negatives: {fneg:?}\nFalse positives: {fpos:?}");
+ }
}
}
#[test]
fn test_disabled_diagnostics() {
let mut config = DiagnosticsConfig::test_sample();
- config.disabled.insert("unresolved-module".into());
+ config.disabled.insert("E0583".into());
let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
@@ -159,7 +169,7 @@ fn minicore_smoke_test() {
let source = minicore.source_code();
let mut config = DiagnosticsConfig::test_sample();
// This should be ignored since we conditionally remove code which creates a single-item use with braces
- config.disabled.insert("unnecessary-braces".to_string());
+ config.disabled.insert("unused_braces".to_string());
check_diagnostics_with_config(config, &source);
}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
index 0a85569b6..ca76d0a87 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
@@ -22,7 +22,7 @@ pub(crate) struct UsageCache {
usages: Vec<(Definition, UsageSearchResult)>,
}
-impl<'db> MatchFinder<'db> {
+impl MatchFinder<'_> {
/// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
/// replacement impossible, so further processing is required in order to properly nest matches
/// and remove overlapping matches. This is done in the `nesting` module.
@@ -121,7 +121,7 @@ impl<'db> MatchFinder<'db> {
// cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two
// lookups in the case of a cache hit.
if usage_cache.find(&definition).is_none() {
- let usages = definition.usages(&self.sema).in_scope(self.search_scope()).all();
+ let usages = definition.usages(&self.sema).in_scope(&self.search_scope()).all();
usage_cache.usages.push((definition, usages));
return &usage_cache.usages.last().unwrap().1;
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
index 8112c4f72..d240127f3 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
@@ -153,6 +153,9 @@ pub(crate) fn external_docs(
NameRefClass::FieldShorthand { local_ref: _, field_ref } => {
Definition::Field(field_ref)
}
+ NameRefClass::ExternCrateShorthand { decl, .. } => {
+ Definition::ExternCrateDecl(decl)
+ }
},
ast::Name(name) => match NameClass::classify(sema, &name)? {
NameClass::Definition(it) | NameClass::ConstReference(it) => it,
@@ -209,6 +212,7 @@ pub(crate) fn resolve_doc_path_for_def(
Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
Definition::Field(it) => it.resolve_doc_path(db, link, ns),
Definition::SelfType(it) => it.resolve_doc_path(db, link, ns),
+ Definition::ExternCrateDecl(it) => it.resolve_doc_path(db, link, ns),
Definition::BuiltinAttr(_)
| Definition::ToolModule(_)
| Definition::BuiltinType(_)
@@ -330,7 +334,9 @@ fn get_doc_links(
base_url.and_then(|url| url.join(path).ok())
};
- let Some((target, file, frag)) = filename_and_frag_for_def(db, def) else { return Default::default(); };
+ let Some((target, file, frag)) = filename_and_frag_for_def(db, def) else {
+ return Default::default();
+ };
let (mut web_url, mut local_url) = get_doc_base_urls(db, target, target_dir, sysroot);
@@ -615,6 +621,9 @@ fn filename_and_frag_for_def(
// FIXME fragment numbering
return Some((adt, file, Some(String::from("impl"))));
}
+ Definition::ExternCrateDecl(it) => {
+ format!("{}/index.html", it.name(db).display(db.upcast()))
+ }
Definition::Local(_)
| Definition::GenericParam(_)
| Definition::Label(_)
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
index e70bc2ec5..c39c696cf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
@@ -37,11 +37,15 @@ pub(crate) fn goto_declaration(
match parent {
ast::NameRef(name_ref) => match NameRefClass::classify(&sema, &name_ref)? {
NameRefClass::Definition(it) => Some(it),
- NameRefClass::FieldShorthand { field_ref, .. } => return field_ref.try_to_nav(db),
+ NameRefClass::FieldShorthand { field_ref, .. } =>
+ return field_ref.try_to_nav(db),
+ NameRefClass::ExternCrateShorthand { decl, .. } =>
+ return decl.try_to_nav(db),
},
ast::Name(name) => match NameClass::classify(&sema, &name)? {
NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it),
- NameClass::PatFieldShorthand { field_ref, .. } => return field_ref.try_to_nav(db),
+ NameClass::PatFieldShorthand { field_ref, .. } =>
+ return field_ref.try_to_nav(db),
},
_ => None
}
@@ -53,6 +57,7 @@ pub(crate) fn goto_declaration(
Definition::Const(c) => c.as_assoc_item(db),
Definition::TypeAlias(ta) => ta.as_assoc_item(db),
Definition::Function(f) => f.as_assoc_item(db),
+ Definition::ExternCrateDecl(it) => return it.try_to_nav(db),
_ => None,
}?;
@@ -211,4 +216,30 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn goto_decl_for_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std$0;
+ /// ^^^
+//- /std/lib.rs crate:std
+// empty
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_decl_for_renamed_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std as abc$0;
+ /// ^^^
+//- /std/lib.rs crate:std
+// empty
+"#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
index 4e641357e..21471ab2a 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -1,6 +1,9 @@
use std::mem::discriminant;
-use crate::{doc_links::token_as_doc_comment, FilePosition, NavigationTarget, RangeInfo, TryToNav};
+use crate::{
+ doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget,
+ RangeInfo, TryToNav,
+};
use hir::{AsAssocItem, AssocItem, Semantics};
use ide_db::{
base_db::{AnchoredPath, FileId, FileLoader},
@@ -73,6 +76,13 @@ pub(crate) fn goto_definition(
.definitions()
.into_iter()
.flat_map(|def| {
+ if let Definition::ExternCrateDecl(crate_def) = def {
+ return crate_def
+ .resolved_crate(db)
+ .map(|it| it.root_module().to_nav(sema.db))
+ .into_iter()
+ .collect();
+ }
try_filter_trait_item_definition(sema, &def)
.unwrap_or_else(|| def_to_nav(sema.db, def))
})
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
index a1a119629..37166bdbd 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
@@ -34,54 +34,50 @@ pub(crate) fn goto_implementation(
_ => 0,
})?;
let range = original_token.text_range();
- let navs = sema
- .descend_into_macros(original_token)
- .into_iter()
- .filter_map(|token| token.parent().and_then(ast::NameLike::cast))
- .filter_map(|node| match &node {
- ast::NameLike::Name(name) => {
- NameClass::classify(&sema, name).map(|class| match class {
- NameClass::Definition(it) | NameClass::ConstReference(it) => it,
- NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
- Definition::Local(local_def)
+ let navs =
+ sema.descend_into_macros(original_token)
+ .into_iter()
+ .filter_map(|token| token.parent().and_then(ast::NameLike::cast))
+ .filter_map(|node| match &node {
+ ast::NameLike::Name(name) => {
+ NameClass::classify(&sema, name).and_then(|class| match class {
+ NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it),
+ NameClass::PatFieldShorthand { .. } => None,
+ })
+ }
+ ast::NameLike::NameRef(name_ref) => NameRefClass::classify(&sema, name_ref)
+ .and_then(|class| match class {
+ NameRefClass::Definition(def) => Some(def),
+ NameRefClass::FieldShorthand { .. }
+ | NameRefClass::ExternCrateShorthand { .. } => None,
+ }),
+ ast::NameLike::Lifetime(_) => None,
+ })
+ .unique()
+ .filter_map(|def| {
+ let navs = match def {
+ Definition::Trait(trait_) => impls_for_trait(&sema, trait_),
+ Definition::Adt(adt) => impls_for_ty(&sema, adt.ty(sema.db)),
+ Definition::TypeAlias(alias) => impls_for_ty(&sema, alias.ty(sema.db)),
+ Definition::BuiltinType(builtin) => impls_for_ty(&sema, builtin.ty(sema.db)),
+ Definition::Function(f) => {
+ let assoc = f.as_assoc_item(sema.db)?;
+ let name = assoc.name(sema.db)?;
+ let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
+ impls_for_trait_item(&sema, trait_, name)
}
- })
- }
- ast::NameLike::NameRef(name_ref) => {
- NameRefClass::classify(&sema, name_ref).map(|class| match class {
- NameRefClass::Definition(def) => def,
- NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
- Definition::Local(local_ref)
+ Definition::Const(c) => {
+ let assoc = c.as_assoc_item(sema.db)?;
+ let name = assoc.name(sema.db)?;
+ let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
+ impls_for_trait_item(&sema, trait_, name)
}
- })
- }
- ast::NameLike::Lifetime(_) => None,
- })
- .unique()
- .filter_map(|def| {
- let navs = match def {
- Definition::Trait(trait_) => impls_for_trait(&sema, trait_),
- Definition::Adt(adt) => impls_for_ty(&sema, adt.ty(sema.db)),
- Definition::TypeAlias(alias) => impls_for_ty(&sema, alias.ty(sema.db)),
- Definition::BuiltinType(builtin) => impls_for_ty(&sema, builtin.ty(sema.db)),
- Definition::Function(f) => {
- let assoc = f.as_assoc_item(sema.db)?;
- let name = assoc.name(sema.db)?;
- let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
- impls_for_trait_item(&sema, trait_, name)
- }
- Definition::Const(c) => {
- let assoc = c.as_assoc_item(sema.db)?;
- let name = assoc.name(sema.db)?;
- let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
- impls_for_trait_item(&sema, trait_, name)
- }
- _ => return None,
- };
- Some(navs)
- })
- .flatten()
- .collect();
+ _ => return None,
+ };
+ Some(navs)
+ })
+ .flatten()
+ .collect();
Some(RangeInfo { range, info: navs })
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
index 7e545491f..43e89a334 100644
--- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -100,10 +100,7 @@ fn highlight_closure_captures(
.flat_map(|local| {
let usages = Definition::Local(local)
.usages(sema)
- .set_scope(Some(SearchScope::file_range(FileRange {
- file_id,
- range: search_range,
- })))
+ .in_scope(&SearchScope::file_range(FileRange { file_id, range: search_range }))
.include_self_refs()
.all()
.references
@@ -139,7 +136,7 @@ fn highlight_references(
.iter()
.filter_map(|&d| {
d.usages(sema)
- .set_scope(Some(SearchScope::single_file(file_id)))
+ .in_scope(&SearchScope::single_file(file_id))
.include_self_refs()
.all()
.references
@@ -183,7 +180,7 @@ fn highlight_references(
.filter_map(|item| {
Definition::from(item)
.usages(sema)
- .set_scope(Some(SearchScope::file_range(FileRange {
+ .set_scope(Some(&SearchScope::file_range(FileRange {
file_id,
range: trait_item_use_scope.text_range(),
})))
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs
index 5ef6ac948..40659e6c2 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs
@@ -9,7 +9,7 @@ use either::Either;
use hir::{db::DefDatabase, HasSource, LangItem, Semantics};
use ide_db::{
base_db::FileRange,
- defs::{Definition, IdentClass, OperatorClass},
+ defs::{Definition, IdentClass, NameRefClass, OperatorClass},
famous_defs::FamousDefs,
helpers::pick_best_token,
FxIndexSet, RootDatabase,
@@ -186,7 +186,20 @@ fn hover_simple(
// rendering poll is very confusing
return None;
}
- Some(class.definitions().into_iter().zip(iter::once(node).cycle()))
+ if let IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand {
+ decl,
+ ..
+ }) = class
+ {
+ return Some(vec![(Definition::ExternCrateDecl(decl), node)]);
+ }
+ Some(
+ class
+ .definitions()
+ .into_iter()
+ .zip(iter::once(node).cycle())
+ .collect::<Vec<_>>(),
+ )
})
.flatten()
.unique_by(|&(def, _)| def)
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
index 136214641..a33a6ee18 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -257,7 +257,7 @@ pub(super) fn keyword(
let KeywordHint { description, keyword_mod, actions } = keyword_hints(sema, token, parent);
let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;
- let docs = doc_owner.attrs(sema.db).docs()?;
+ let docs = doc_owner.docs(sema.db)?;
let markup = process_markup(
sema.db,
Definition::Module(doc_owner),
@@ -422,10 +422,10 @@ pub(super) fn definition(
|&it| {
if !it.parent_enum(db).is_data_carrying(db) {
match it.eval(db) {
- Ok(x) => {
- Some(if x >= 10 { format!("{x} ({x:#X})") } else { format!("{x}") })
+ Ok(it) => {
+ Some(if it >= 10 { format!("{it} ({it:#X})") } else { format!("{it}") })
}
- Err(_) => it.value(db).map(|x| format!("{x:?}")),
+ Err(_) => it.value(db).map(|it| format!("{it:?}")),
}
} else {
None
@@ -437,7 +437,7 @@ pub(super) fn definition(
Definition::Const(it) => label_value_and_docs(db, it, |it| {
let body = it.render_eval(db);
match body {
- Ok(x) => Some(x),
+ Ok(it) => Some(it),
Err(_) => {
let source = it.source(db)?;
let mut body = source.value.body()?.syntax().clone();
@@ -472,6 +472,7 @@ pub(super) fn definition(
}
Definition::GenericParam(it) => label_and_docs(db, it),
Definition::Label(it) => return Some(Markup::fenced_block(&it.name(db).display(db))),
+ Definition::ExternCrateDecl(it) => label_and_docs(db, it),
// FIXME: We should be able to show more info about these
Definition::BuiltinAttr(it) => return render_builtin_attr(db, it),
Definition::ToolModule(it) => return Some(Markup::fenced_block(&it.name(db))),
@@ -620,7 +621,7 @@ where
D: HasAttrs + HirDisplay,
{
let label = def.display(db).to_string();
- let docs = def.attrs(db).docs();
+ let docs = def.docs(db);
(label, docs)
}
@@ -645,7 +646,7 @@ where
) {
format_to!(label, "{layout}");
}
- let docs = def.attrs(db).docs();
+ let docs = def.docs(db);
(label, docs)
}
@@ -677,7 +678,7 @@ where
) {
format_to!(label, "{layout}");
}
- let docs = def.attrs(db).docs();
+ let docs = def.docs(db);
(label, docs)
}
@@ -696,7 +697,7 @@ where
} else {
def.display(db).to_string()
};
- let docs = def.attrs(db).docs();
+ let docs = def.docs(db);
(label, docs)
}
@@ -727,14 +728,14 @@ fn builtin(famous_defs: &FamousDefs<'_, '_>, builtin: hir::BuiltinType) -> Optio
// std exposes prim_{} modules with docstrings on the root to document the builtins
let primitive_mod = format!("prim_{}", builtin.name().display(famous_defs.0.db));
let doc_owner = find_std_module(famous_defs, &primitive_mod)?;
- let docs = doc_owner.attrs(famous_defs.0.db).docs()?;
+ let docs = doc_owner.docs(famous_defs.0.db)?;
markup(Some(docs.into()), builtin.name().display(famous_defs.0.db).to_string(), None)
}
fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option<hir::Module> {
let db = famous_defs.0.db;
let std_crate = famous_defs.std()?;
- let std_root_module = std_crate.root_module(db);
+ let std_root_module = std_crate.root_module();
std_root_module.children(db).find(|module| {
module.name(db).map_or(false, |module| module.display(db).to_string() == name)
})
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
index f75ebfa12..ddc71dffa 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -674,7 +674,7 @@ struct Foo { fiel$0d_a: u8, field_b: i32, field_c: i16 }
```
```rust
- field_a: u8 // size = 1, align = 1, offset = 4
+ field_a: u8 // size = 1, align = 1, offset = 6
```
"#]],
);
@@ -779,6 +779,39 @@ const foo$0: u32 = {
```
"#]],
);
+
+ check(
+ r#"const FOO$0: i32 = -2147483648;"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: i32 = -2147483648 (0x80000000)
+ ```
+ "#]],
+ );
+
+ check(
+ r#"
+ const FOO: i32 = -2147483648;
+ const BAR$0: bool = FOO > 0;
+ "#,
+ expect![[r#"
+ *BAR*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const BAR: bool = false
+ ```
+ "#]],
+ );
}
#[test]
@@ -1583,6 +1616,9 @@ fn test_hover_extern_crate() {
check(
r#"
//- /main.rs crate:main deps:std
+//! Crate docs
+
+/// Decl docs!
extern crate st$0d;
//- /std/lib.rs crate:std
//! Standard library for this test
@@ -1591,23 +1627,32 @@ extern crate st$0d;
//! abc123
"#,
expect![[r#"
- *std*
+ *std*
- ```rust
- extern crate std
- ```
+ ```rust
+ main
+ ```
- ---
+ ```rust
+ extern crate std
+ ```
+
+ ---
- Standard library for this test
+ Decl docs!
- Printed?
- abc123
- "#]],
+ Standard library for this test
+
+ Printed?
+ abc123
+ "#]],
);
check(
r#"
//- /main.rs crate:main deps:std
+//! Crate docs
+
+/// Decl docs!
extern crate std as ab$0c;
//- /std/lib.rs crate:std
//! Standard library for this test
@@ -1616,19 +1661,25 @@ extern crate std as ab$0c;
//! abc123
"#,
expect![[r#"
- *abc*
+ *abc*
- ```rust
- extern crate std
- ```
+ ```rust
+ main
+ ```
- ---
+ ```rust
+ extern crate std as abc
+ ```
- Standard library for this test
+ ---
- Printed?
- abc123
- "#]],
+ Decl docs!
+
+ Standard library for this test
+
+ Printed?
+ abc123
+ "#]],
);
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
index 10bee2a6a..6d6bd315e 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
@@ -259,7 +259,7 @@ fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool,
}
})() else {
never!("broken syntax tree?\n{:?}\n{:?}", expr, dummy_expr);
- return (true, true)
+ return (true, true);
};
// At this point
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
index 84eac16b9..b621a8dda 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
@@ -474,7 +474,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9287..9295,
+ range: 9289..9297,
},
),
tooltip: "",
@@ -487,7 +487,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9319..9323,
+ range: 9321..9325,
},
),
tooltip: "",
@@ -511,7 +511,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9287..9295,
+ range: 9289..9297,
},
),
tooltip: "",
@@ -524,7 +524,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9319..9323,
+ range: 9321..9325,
},
),
tooltip: "",
@@ -548,7 +548,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9287..9295,
+ range: 9289..9297,
},
),
tooltip: "",
@@ -561,7 +561,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9319..9323,
+ range: 9321..9325,
},
),
tooltip: "",
diff --git a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs
index cbcbb4b09..d06ffd535 100644
--- a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs
@@ -34,13 +34,15 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<Strin
_ => return None,
};
let span_formatter = |file_id, text_range: TextRange| {
- let line_col = db.line_index(file_id).line_col(text_range.start());
let path = &db
.source_root(db.file_source_root(file_id))
.path_for_file(&file_id)
.map(|x| x.to_string());
let path = path.as_deref().unwrap_or("<unknown file>");
- format!("file://{path}#{}:{}", line_col.line + 1, line_col.col)
+ match db.line_index(file_id).try_line_col(text_range.start()) {
+ Some(line_col) => format!("file://{path}#{}:{}", line_col.line + 1, line_col.col),
+ None => format!("file://{path} range {:?}", text_range),
+ }
};
Some(def.eval(db, span_formatter))
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
index f195f78b3..bf77d55d5 100644
--- a/src/tools/rust-analyzer/crates/ide/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -60,6 +60,7 @@ mod interpret_function;
mod view_item_tree;
mod shuffle_crate_graph;
mod fetch_crates;
+mod view_memory_layout;
use std::ffi::OsStr;
@@ -74,6 +75,7 @@ use ide_db::{
};
use syntax::SourceFile;
use triomphe::Arc;
+use view_memory_layout::{view_memory_layout, RecursiveMemoryLayout};
use crate::navigation_target::{ToNav, TryToNav};
@@ -125,7 +127,7 @@ pub use ide_db::{
label::Label,
line_index::{LineCol, LineIndex},
search::{ReferenceCategory, SearchScope},
- source_change::{FileSystemEdit, SourceChange},
+ source_change::{FileSystemEdit, SnippetEdit, SourceChange},
symbol_index::Query,
RootDatabase, SymbolKind,
};
@@ -642,7 +644,7 @@ impl Analysis {
};
self.with_db(|db| {
- let diagnostic_assists = if include_fixes {
+ let diagnostic_assists = if diagnostics_config.enabled && include_fixes {
ide_diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id)
.into_iter()
.flat_map(|it| it.fixes.unwrap_or_default())
@@ -724,6 +726,13 @@ impl Analysis {
self.with_db(|db| move_item::move_item(db, range, direction))
}
+ pub fn get_recursive_memory_layout(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RecursiveMemoryLayout>> {
+ self.with_db(|db| view_memory_layout(db, position))
+ }
+
/// Performs an operation on the database that may be canceled.
///
/// rust-analyzer needs to be able to answer semantic questions about the
diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
index 0d57e63d2..17f3771b1 100644
--- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
@@ -177,6 +177,17 @@ pub(crate) fn def_to_moniker(
});
}
+ // Qualify locals/parameters by their parent definition name.
+ if let Definition::Local(it) = def {
+ let parent_name = it.parent(db).name(db);
+ if let Some(name) = parent_name {
+ description.push(MonikerDescriptor {
+ name: name.display(db).to_string(),
+ desc: MonikerDescriptorKind::Method,
+ });
+ }
+ }
+
let name_desc = match def {
// These are handled by top-level guard (for performance).
Definition::GenericParam(_)
@@ -247,6 +258,10 @@ pub(crate) fn def_to_moniker(
name: s.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Meta,
},
+ Definition::ExternCrateDecl(m) => MonikerDescriptor {
+ name: m.name(db).display(db).to_string(),
+ desc: MonikerDescriptorKind::Namespace,
+ },
};
description.push(name_desc);
@@ -320,7 +335,7 @@ use foo::module::func;
fn main() {
func$0();
}
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub fn func() {}
}
@@ -336,7 +351,7 @@ use foo::module::func;
fn main() {
func();
}
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub fn func$0() {}
}
@@ -351,7 +366,7 @@ pub mod module {
fn moniker_for_trait() {
check_moniker(
r#"
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
pub fn func$0() {}
@@ -368,7 +383,7 @@ pub mod module {
fn moniker_for_trait_constant() {
check_moniker(
r#"
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
const MY_CONST$0: u8;
@@ -385,7 +400,7 @@ pub mod module {
fn moniker_for_trait_type() {
check_moniker(
r#"
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
type MyType$0;
@@ -402,7 +417,7 @@ pub mod module {
fn moniker_for_trait_impl_function() {
check_moniker(
r#"
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
pub fn func() {}
@@ -430,7 +445,7 @@ use foo::St;
fn main() {
let x = St { a$0: 2 };
}
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub struct St {
pub a: i32,
}
@@ -450,7 +465,7 @@ use foo::module::func;
fn main() {
func();
}
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub fn func() {
let x$0 = 2;
diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
index 385c1b0c0..d1479dd1e 100644
--- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
@@ -102,7 +102,7 @@ impl NavigationTarget {
full_range,
SymbolKind::Module,
);
- res.docs = module.attrs(db).docs();
+ res.docs = module.docs(db);
res.description = Some(module.display(db).to_string());
return res;
}
@@ -217,6 +217,7 @@ impl TryToNav for Definition {
Definition::Trait(it) => it.try_to_nav(db),
Definition::TraitAlias(it) => it.try_to_nav(db),
Definition::TypeAlias(it) => it.try_to_nav(db),
+ Definition::ExternCrateDecl(it) => Some(it.try_to_nav(db)?),
Definition::BuiltinType(_) => None,
Definition::ToolModule(_) => None,
Definition::BuiltinAttr(_) => None,
@@ -357,13 +358,11 @@ impl ToNav for hir::Module {
impl TryToNav for hir::Impl {
fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
let InFile { file_id, value } = self.source(db)?;
- let derive_attr = self.is_builtin_derive(db);
+ let derive_attr = self.as_builtin_derive(db);
- let focus = if derive_attr.is_some() { None } else { value.self_ty() };
-
- let syntax = match &derive_attr {
- Some(attr) => attr.value.syntax(),
- None => value.syntax(),
+ let (focus, syntax) = match &derive_attr {
+ Some(attr) => (None, attr.value.syntax()),
+ None => (value.self_ty(), value.syntax()),
};
let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus);
@@ -377,6 +376,30 @@ impl TryToNav for hir::Impl {
}
}
+impl TryToNav for hir::ExternCrateDecl {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ let src = self.source(db)?;
+ let InFile { file_id, value } = src;
+ let focus = value
+ .rename()
+ .map_or_else(|| value.name_ref().map(Either::Left), |it| it.name().map(Either::Right));
+ let (file_id, full_range, focus_range) =
+ orig_range_with_focus(db, file_id, value.syntax(), focus);
+ let mut res = NavigationTarget::from_syntax(
+ file_id,
+ self.alias_or_name(db).unwrap_or_else(|| self.name(db)).to_smol_str(),
+ focus_range,
+ full_range,
+ SymbolKind::Module,
+ );
+
+ res.docs = self.docs(db);
+ res.description = Some(self.display(db).to_string());
+ res.container_name = container_name(db, *self);
+ Some(res)
+ }
+}
+
impl TryToNav for hir::Field {
fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
let src = self.source(db)?;
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
index fdc5261ac..813f9ed94 100644
--- a/src/tools/rust-analyzer/crates/ide/src/references.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -74,7 +74,7 @@ pub(crate) fn find_all_refs(
}
});
let mut usages =
- def.usages(sema).set_scope(search_scope.clone()).include_self_refs().all();
+ def.usages(sema).set_scope(search_scope.as_ref()).include_self_refs().all();
if literal_search {
retain_adt_literal_usages(&mut usages, def, sema);
@@ -137,6 +137,9 @@ pub(crate) fn find_defs<'a>(
NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
Definition::Local(local_ref)
}
+ NameRefClass::ExternCrateShorthand { decl, .. } => {
+ Definition::ExternCrateDecl(decl)
+ }
}
}
ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs
index e10c46381..dae8e71e8 100644
--- a/src/tools/rust-analyzer/crates/ide/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs
@@ -145,7 +145,14 @@ fn find_definitions(
if name
.syntax()
.parent()
- .map_or(false, |it| ast::Rename::can_cast(it.kind())) =>
+ .map_or(false, |it| ast::Rename::can_cast(it.kind()))
+ // FIXME: uncomment this once we resolve usages to extern crate declarations
+ // && name
+ // .syntax()
+ // .ancestors()
+ // .nth(2)
+ // .map_or(true, |it| !ast::ExternCrate::can_cast(it.kind()))
+ =>
{
bail!("Renaming aliases is currently unsupported")
}
@@ -165,7 +172,12 @@ fn find_definitions(
NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
Definition::Local(local_ref)
}
+ NameRefClass::ExternCrateShorthand { decl, .. } => {
+ Definition::ExternCrateDecl(decl)
+ }
})
+ // FIXME: uncomment this once we resolve usages to extern crate declarations
+ .filter(|def| !matches!(def, Definition::ExternCrateDecl(..)))
.ok_or_else(|| format_err!("No references found at position"))
.and_then(|def| {
// if the name differs from the definitions name it has to be an alias
@@ -367,7 +379,7 @@ mod tests {
let mut file_id: Option<FileId> = None;
for edit in source_change.source_file_edits {
file_id = Some(edit.0);
- for indel in edit.1.into_iter() {
+ for indel in edit.1 .0.into_iter() {
text_edit_builder.replace(indel.delete, indel.insert);
}
}
@@ -895,14 +907,17 @@ mod foo$0;
source_file_edits: {
FileId(
1,
- ): TextEdit {
- indels: [
- Indel {
- insert: "foo2",
- delete: 4..7,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 4..7,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -944,24 +959,30 @@ use crate::foo$0::FooContent;
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "quux",
- delete: 8..11,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "quux",
+ delete: 8..11,
+ },
+ ],
+ },
+ None,
+ ),
FileId(
2,
- ): TextEdit {
- indels: [
- Indel {
- insert: "quux",
- delete: 11..14,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "quux",
+ delete: 11..14,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -997,14 +1018,17 @@ mod fo$0o;
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "foo2",
- delete: 4..7,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 4..7,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveDir {
@@ -1047,14 +1071,17 @@ mod outer { mod fo$0o; }
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "bar",
- delete: 16..19,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "bar",
+ delete: 16..19,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -1120,24 +1147,30 @@ pub mod foo$0;
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "foo2",
- delete: 27..30,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 27..30,
+ },
+ ],
+ },
+ None,
+ ),
FileId(
1,
- ): TextEdit {
- indels: [
- Indel {
- insert: "foo2",
- delete: 8..11,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 8..11,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -1187,14 +1220,17 @@ mod quux;
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "foo2",
- delete: 4..7,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 4..7,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -1325,18 +1361,21 @@ pub fn baz() {}
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "r#fn",
- delete: 4..7,
- },
- Indel {
- insert: "r#fn",
- delete: 22..25,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "r#fn",
+ delete: 4..7,
+ },
+ Indel {
+ insert: "r#fn",
+ delete: 22..25,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -1395,18 +1434,21 @@ pub fn baz() {}
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "foo",
- delete: 4..8,
- },
- Indel {
- insert: "foo",
- delete: 23..27,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "foo",
+ delete: 4..8,
+ },
+ Indel {
+ insert: "foo",
+ delete: 23..27,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -2487,4 +2529,109 @@ fn main() {
",
)
}
+
+ #[test]
+ fn extern_crate() {
+ check_prepare(
+ r"
+//- /lib.rs crate:main deps:foo
+extern crate foo$0;
+use foo as qux;
+//- /foo.rs crate:foo
+",
+ expect![[r#"No references found at position"#]],
+ );
+ // FIXME: replace above check_prepare with this once we resolve usages to extern crate declarations
+ // check(
+ // "bar",
+ // r"
+ // //- /lib.rs crate:main deps:foo
+ // extern crate foo$0;
+ // use foo as qux;
+ // //- /foo.rs crate:foo
+ // ",
+ // r"
+ // extern crate foo as bar;
+ // use bar as qux;
+ // ",
+ // );
+ }
+
+ #[test]
+ fn extern_crate_rename() {
+ check_prepare(
+ r"
+//- /lib.rs crate:main deps:foo
+extern crate foo as qux$0;
+use qux as frob;
+//- /foo.rs crate:foo
+",
+ expect!["Renaming aliases is currently unsupported"],
+ );
+ // FIXME: replace above check_prepare with this once we resolve usages to extern crate
+ // declarations
+ // check(
+ // "bar",
+ // r"
+ // //- /lib.rs crate:main deps:foo
+ // extern crate foo as qux$0;
+ // use qux as frob;
+ // //- /foo.rs crate:foo
+ // ",
+ // r"
+ // extern crate foo as bar;
+ // use bar as frob;
+ // ",
+ // );
+ }
+
+ #[test]
+ fn extern_crate_self() {
+ check_prepare(
+ r"
+extern crate self$0;
+use self as qux;
+",
+ expect!["No references found at position"],
+ );
+ // FIXME: replace above check_prepare with this once we resolve usages to extern crate declarations
+ // check(
+ // "bar",
+ // r"
+ // extern crate self$0;
+ // use self as qux;
+ // ",
+ // r"
+ // extern crate self as bar;
+ // use self as qux;
+ // ",
+ // );
+ }
+
+ #[test]
+ fn extern_crate_self_rename() {
+ check_prepare(
+ r"
+//- /lib.rs crate:main deps:foo
+extern crate self as qux$0;
+use qux as frob;
+//- /foo.rs crate:foo
+",
+ expect!["Renaming aliases is currently unsupported"],
+ );
+ // FIXME: replace above check_prepare with this once we resolve usages to extern crate declarations
+ // check(
+ // "bar",
+ // r"
+ // //- /lib.rs crate:main deps:foo
+ // extern crate self as qux$0;
+ // use qux as frob;
+ // //- /foo.rs crate:foo
+ // ",
+ // r"
+ // extern crate self as bar;
+ // use bar as frob;
+ // ",
+ // );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
index 27ad63d82..5f87a7855 100644
--- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
@@ -2,7 +2,7 @@ use std::fmt;
use ast::HasName;
use cfg::CfgExpr;
-use hir::{AsAssocItem, HasAttrs, HasSource, Semantics};
+use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, Semantics};
use ide_assists::utils::test_related_attribute;
use ide_db::{
base_db::{FilePosition, FileRange},
@@ -14,7 +14,7 @@ use ide_db::{
use itertools::Itertools;
use stdx::{always, format_to};
use syntax::{
- ast::{self, AstNode, HasAttrs as _},
+ ast::{self, AstNode},
SmolStr, SyntaxNode,
};
@@ -232,7 +232,7 @@ fn find_related_tests(
for def in defs {
let defs = def
.usages(sema)
- .set_scope(search_scope.clone())
+ .set_scope(search_scope.as_ref())
.all()
.references
.into_values()
@@ -307,10 +307,9 @@ pub(crate) fn runnable_fn(
sema: &Semantics<'_, RootDatabase>,
def: hir::Function,
) -> Option<Runnable> {
- let func = def.source(sema.db)?;
let name = def.name(sema.db).to_smol_str();
- let root = def.module(sema.db).krate().root_module(sema.db);
+ let root = def.module(sema.db).krate().root_module();
let kind = if name == "main" && def.module(sema.db) == root {
RunnableKind::Bin
@@ -323,10 +322,10 @@ pub(crate) fn runnable_fn(
canonical_path.map(TestId::Path).unwrap_or(TestId::Name(name))
};
- if test_related_attribute(&func.value).is_some() {
- let attr = TestAttr::from_fn(&func.value);
+ if def.is_test(sema.db) {
+ let attr = TestAttr::from_fn(sema.db, def);
RunnableKind::Test { test_id: test_id(), attr }
- } else if func.value.has_atom_attr("bench") {
+ } else if def.is_bench(sema.db) {
RunnableKind::Bench { test_id: test_id() }
} else {
return None;
@@ -335,7 +334,7 @@ pub(crate) fn runnable_fn(
let nav = NavigationTarget::from_named(
sema.db,
- func.as_ref().map(|it| it as &dyn ast::HasName),
+ def.source(sema.db)?.as_ref().map(|it| it as &dyn ast::HasName),
SymbolKind::Function,
);
let cfg = def.attrs(sema.db).cfg();
@@ -487,12 +486,8 @@ pub struct TestAttr {
}
impl TestAttr {
- fn from_fn(fn_def: &ast::Fn) -> TestAttr {
- let ignore = fn_def
- .attrs()
- .filter_map(|attr| attr.simple_name())
- .any(|attribute_text| attribute_text == "ignore");
- TestAttr { ignore }
+ fn from_fn(db: &dyn HirDatabase, fn_def: hir::Function) -> TestAttr {
+ TestAttr { ignore: fn_def.is_ignore(db) }
}
}
@@ -594,6 +589,9 @@ fn main() {}
#[test]
fn test_foo() {}
+#[::core::prelude::v1::test]
+fn test_full_path() {}
+
#[test]
#[ignore]
fn test_foo() {}
@@ -605,7 +603,7 @@ mod not_a_root {
fn main() {}
}
"#,
- &[TestMod, Bin, Test, Test, Bench],
+ &[TestMod, Bin, Test, Test, Test, Bench],
expect![[r#"
[
Runnable {
@@ -614,7 +612,7 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 0..137,
+ full_range: 0..190,
name: "",
kind: Module,
},
@@ -664,8 +662,29 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 41..75,
- focus_range: 62..70,
+ full_range: 41..92,
+ focus_range: 73..87,
+ name: "test_full_path",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_full_path",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 94..128,
+ focus_range: 115..123,
name: "test_foo",
kind: Function,
},
@@ -685,8 +704,8 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 77..99,
- focus_range: 89..94,
+ full_range: 130..152,
+ focus_range: 142..147,
name: "bench",
kind: Function,
},
diff --git a/src/tools/rust-analyzer/crates/ide/src/ssr.rs b/src/tools/rust-analyzer/crates/ide/src/ssr.rs
index deaf3c9c4..d8d81869a 100644
--- a/src/tools/rust-analyzer/crates/ide/src/ssr.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/ssr.rs
@@ -126,14 +126,17 @@ mod tests {
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "3",
- delete: 33..34,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "3",
+ delete: 33..34,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [],
is_snippet: false,
@@ -163,24 +166,30 @@ mod tests {
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "3",
- delete: 33..34,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "3",
+ delete: 33..34,
+ },
+ ],
+ },
+ None,
+ ),
FileId(
1,
- ): TextEdit {
- indels: [
- Indel {
- insert: "3",
- delete: 11..12,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "3",
+ delete: 11..12,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [],
is_snippet: false,
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
index 3e3d9f8f8..d8696198d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -3,13 +3,14 @@
use std::collections::HashMap;
-use hir::{db::HirDatabase, Crate, Module, Semantics};
+use hir::{db::HirDatabase, Crate, Module};
+use ide_db::helpers::get_definition;
use ide_db::{
base_db::{FileId, FileRange, SourceDatabaseExt},
- defs::{Definition, IdentClass},
+ defs::Definition,
FxHashSet, RootDatabase,
};
-use syntax::{AstNode, SyntaxKind::*, SyntaxToken, TextRange, T};
+use syntax::{AstNode, SyntaxKind::*, TextRange, T};
use crate::{
hover::hover_for_definition,
@@ -73,7 +74,7 @@ impl TokenStore {
}
pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
- self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
+ self.0.into_iter().enumerate().map(|(id, data)| (TokenId(id), data))
}
}
@@ -87,7 +88,7 @@ pub struct StaticIndexedFile {
fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
let mut worklist: Vec<_> =
- Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
+ Crate::all(db).into_iter().map(|krate| krate.root_module()).collect();
let mut modules = Vec::new();
while let Some(module) = worklist.pop() {
@@ -132,9 +133,9 @@ impl StaticIndex<'_> {
// hovers
let sema = hir::Semantics::new(self.db);
let tokens_or_nodes = sema.parse(file_id).syntax().clone();
- let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
+ let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|it| match it {
syntax::NodeOrToken::Node(_) => None,
- syntax::NodeOrToken::Token(x) => Some(x),
+ syntax::NodeOrToken::Token(it) => Some(it),
});
let hover_config = HoverConfig {
links_in_hover: true,
@@ -154,28 +155,29 @@ impl StaticIndex<'_> {
let range = token.text_range();
let node = token.parent().unwrap();
let def = match get_definition(&sema, token.clone()) {
- Some(x) => x,
+ Some(it) => it,
None => continue,
};
- let id = if let Some(x) = self.def_map.get(&def) {
- *x
+ let id = if let Some(it) = self.def_map.get(&def) {
+ *it
} else {
- let x = self.tokens.insert(TokenStaticData {
+ let it = self.tokens.insert(TokenStaticData {
hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
- definition: def
- .try_to_nav(self.db)
- .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
+ definition: def.try_to_nav(self.db).map(|it| FileRange {
+ file_id: it.file_id,
+ range: it.focus_or_full_range(),
+ }),
references: vec![],
moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
});
- self.def_map.insert(def, x);
- x
+ self.def_map.insert(def, it);
+ it
};
let token = self.tokens.get_mut(id).unwrap();
token.references.push(ReferenceData {
range: FileRange { range, file_id },
is_definition: match def.try_to_nav(self.db) {
- Some(x) => x.file_id == file_id && x.focus_or_full_range() == range,
+ Some(it) => it.file_id == file_id && it.focus_or_full_range() == range,
None => false,
},
});
@@ -187,7 +189,7 @@ impl StaticIndex<'_> {
pub fn compute(analysis: &Analysis) -> StaticIndex<'_> {
let db = &*analysis.db;
let work = all_modules(db).into_iter().filter(|module| {
- let file_id = module.definition_source(db).file_id.original_file(db);
+ let file_id = module.definition_source_file_id(db).original_file(db);
let source_root = db.file_source_root(file_id);
let source_root = db.source_root(source_root);
!source_root.is_library
@@ -201,7 +203,7 @@ impl StaticIndex<'_> {
};
let mut visited_files = FxHashSet::default();
for module in work {
- let file_id = module.definition_source(db).file_id.original_file(db);
+ let file_id = module.definition_source_file_id(db).original_file(db);
if visited_files.contains(&file_id) {
continue;
}
@@ -213,16 +215,6 @@ impl StaticIndex<'_> {
}
}
-fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Definition> {
- for token in sema.descend_into_macros(token) {
- let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
- if let Some(&[x]) = def.as_deref() {
- return Some(x);
- }
- }
- None
-}
-
#[cfg(test)]
mod tests {
use crate::{fixture, StaticIndex};
@@ -233,14 +225,14 @@ mod tests {
fn check_all_ranges(ra_fixture: &str) {
let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
let s = StaticIndex::compute(&analysis);
- let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+ let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect();
for f in s.files {
for (range, _) in f.tokens {
- let x = FileRange { file_id: f.file_id, range };
- if !range_set.contains(&x) {
- panic!("additional range {x:?}");
+ let it = FileRange { file_id: f.file_id, range };
+ if !range_set.contains(&it) {
+ panic!("additional range {it:?}");
}
- range_set.remove(&x);
+ range_set.remove(&it);
}
}
if !range_set.is_empty() {
@@ -251,17 +243,17 @@ mod tests {
fn check_definitions(ra_fixture: &str) {
let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
let s = StaticIndex::compute(&analysis);
- let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+ let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect();
for (_, t) in s.tokens.iter() {
- if let Some(x) = t.definition {
- if x.range.start() == TextSize::from(0) {
+ if let Some(t) = t.definition {
+ if t.range.start() == TextSize::from(0) {
// ignore definitions that are whole of file
continue;
}
- if !range_set.contains(&x) {
- panic!("additional definition {x:?}");
+ if !range_set.contains(&t) {
+ panic!("additional definition {t:?}");
}
- range_set.remove(&x);
+ range_set.remove(&t);
}
}
if !range_set.is_empty() {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
index dc06591ff..ae9723640 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -24,7 +24,7 @@ use syntax::{
use crate::{
syntax_highlighting::{
- escape::{highlight_escape_char, highlight_escape_string},
+ escape::{highlight_escape_byte, highlight_escape_char, highlight_escape_string},
format::highlight_format_string,
highlights::Highlights,
macro_::MacroHighlighter,
@@ -265,10 +265,14 @@ fn traverse(
// set macro and attribute highlighting states
match event.clone() {
- Enter(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
+ Enter(NodeOrToken::Node(node))
+ if current_macro.is_none() && ast::TokenTree::can_cast(node.kind()) =>
+ {
tt_level += 1;
}
- Leave(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
+ Leave(NodeOrToken::Node(node))
+ if current_macro.is_none() && ast::TokenTree::can_cast(node.kind()) =>
+ {
tt_level -= 1;
}
Enter(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
@@ -387,7 +391,7 @@ fn traverse(
};
let descended_element = if in_macro {
// Attempt to descend tokens into macro-calls.
- match element {
+ let res = match element {
NodeOrToken::Token(token) if token.kind() != COMMENT => {
let token = match attr_or_derive_item {
Some(AttrOrDerive::Attr(_)) => {
@@ -412,7 +416,8 @@ fn traverse(
}
}
e => e,
- }
+ };
+ res
} else {
element
};
@@ -466,6 +471,14 @@ fn traverse(
};
highlight_escape_char(hl, &char, range.start())
+ } else if ast::Byte::can_cast(token.kind())
+ && ast::Byte::can_cast(descended_token.kind())
+ {
+ let Some(byte) = ast::Byte::cast(token) else {
+ continue;
+ };
+
+ highlight_escape_byte(hl, &byte, range.start())
}
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs
index 211e35880..5913ca5e4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs
@@ -1,7 +1,7 @@
//! Syntax highlighting for escape sequences
use crate::syntax_highlighting::highlights::Highlights;
use crate::{HlRange, HlTag};
-use syntax::ast::{Char, IsString};
+use syntax::ast::{Byte, Char, IsString};
use syntax::{AstToken, TextRange, TextSize};
pub(super) fn highlight_escape_string<T: IsString>(
@@ -10,14 +10,14 @@ pub(super) fn highlight_escape_string<T: IsString>(
start: TextSize,
) {
string.escaped_char_ranges(&mut |piece_range, char| {
- if char.is_err() {
- return;
- }
-
if string.text()[piece_range.start().into()..].starts_with('\\') {
+ let highlight = match char {
+ Ok(_) => HlTag::EscapeSequence,
+ Err(_) => HlTag::InvalidEscapeSequence,
+ };
stack.add(HlRange {
range: piece_range + start,
- highlight: HlTag::EscapeSequence.into(),
+ highlight: highlight.into(),
binding_hash: None,
});
}
@@ -26,6 +26,9 @@ pub(super) fn highlight_escape_string<T: IsString>(
pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char, start: TextSize) {
if char.value().is_none() {
+ // We do not emit invalid escape highlighting here. The lexer would likely be in a bad
+ // state and this token contains junk, since `'` is not a reliable delimiter (consider
+ // lifetimes). Nonetheless, parser errors should already be emitted.
return;
}
@@ -43,3 +46,24 @@ pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char, start:
TextRange::new(start + TextSize::from(1), start + TextSize::from(text.len() as u32 + 1));
stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
}
+
+pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte, start: TextSize) {
+ if byte.value().is_none() {
+ // See `highlight_escape_char` for why no error highlighting here.
+ return;
+ }
+
+ let text = byte.text();
+ if !text.starts_with("b'") || !text.ends_with('\'') {
+ return;
+ }
+
+ let text = &text[2..text.len() - 1];
+ if !text.starts_with('\\') {
+ return;
+ }
+
+ let range =
+ TextRange::new(start + TextSize::from(2), start + TextSize::from(text.len() as u32 + 2));
+ stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
index 3c40246a6..8e96bfa01 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
@@ -269,7 +269,26 @@ fn highlight_name_ref(
h
}
- NameRefClass::FieldShorthand { .. } => SymbolKind::Field.into(),
+ NameRefClass::FieldShorthand { field_ref, .. } => {
+ highlight_def(sema, krate, field_ref.into())
+ }
+ NameRefClass::ExternCrateShorthand { decl, krate: resolved_krate } => {
+ let mut h = HlTag::Symbol(SymbolKind::Module).into();
+
+ if resolved_krate != krate {
+ h |= HlMod::Library
+ }
+ let is_public = decl.visibility(db) == hir::Visibility::Public;
+ if is_public {
+ h |= HlMod::Public
+ }
+ let is_from_builtin_crate = resolved_krate.is_builtin(db);
+ if is_from_builtin_crate {
+ h |= HlMod::DefaultLibrary;
+ }
+ h |= HlMod::CrateRoot;
+ h
+ }
};
h.tag = match name_ref.token_kind() {
@@ -474,6 +493,14 @@ fn highlight_def(
}
h
}
+ Definition::ExternCrateDecl(extern_crate) => {
+ let mut highlight =
+ Highlight::new(HlTag::Symbol(SymbolKind::Module)) | HlMod::CrateRoot;
+ if extern_crate.alias(db).is_none() {
+ highlight |= HlMod::Library;
+ }
+ highlight
+ }
Definition::Label(_) => Highlight::new(HlTag::Symbol(SymbolKind::Label)),
Definition::BuiltinAttr(_) => Highlight::new(HlTag::Symbol(SymbolKind::BuiltinAttr)),
Definition::ToolModule(_) => Highlight::new(HlTag::Symbol(SymbolKind::ToolModule)),
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
index 2c7823069..bbc6b55a6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
@@ -109,6 +109,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
";
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
index 901df147d..2657a6414 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
@@ -288,7 +288,7 @@ fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option<ast::Stri
fn module_def_to_hl_tag(def: Definition) -> HlTag {
let symbol = match def {
- Definition::Module(_) => SymbolKind::Module,
+ Definition::Module(_) | Definition::ExternCrateDecl(_) => SymbolKind::Module,
Definition::Function(_) => SymbolKind::Function,
Definition::Adt(hir::Adt::Struct(_)) => SymbolKind::Struct,
Definition::Adt(hir::Adt::Enum(_)) => SymbolKind::Enum,
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
index f98310911..6d4cdd0ef 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
@@ -29,6 +29,7 @@ pub enum HlTag {
Comment,
EscapeSequence,
FormatSpecifier,
+ InvalidEscapeSequence,
Keyword,
NumericLiteral,
Operator(HlOperator),
@@ -166,6 +167,7 @@ impl HlTag {
HlTag::CharLiteral => "char_literal",
HlTag::Comment => "comment",
HlTag::EscapeSequence => "escape_sequence",
+ HlTag::InvalidEscapeSequence => "invalid_escape_sequence",
HlTag::FormatSpecifier => "format_specifier",
HlTag::Keyword => "keyword",
HlTag::Punctuation(punct) => match punct {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
index 9ed65fbc8..4dcbfe4eb 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">fn</span> <span class="function declaration">not_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
index 567ab8ccc..bf5505caf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">allow</span><span class="parenthesis attribute">(</span><span class="none attribute">dead_code</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="tool_module attribute library">rustfmt</span><span class="operator attribute">::</span><span class="tool_module attribute library">skip</span><span class="attribute_bracket attribute">]</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
index 1e4c06df7..0d1b3c1f1 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root library">foo</span><span class="semicolon">;</span>
<span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">iter</span><span class="semicolon">;</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
index 5d66f832d..dd1528ed0 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">iter</span><span class="semicolon">;</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index 35f240d42..d5f92aa5d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="comment documentation">//! This is a module to test doc injection.</span>
<span class="comment documentation">//! ```</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
index 87b9da46e..88a008796 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
@@ -40,8 +40,10 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root default_library library">std</span><span class="semicolon">;</span>
-<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root default_library library">alloc</span> <span class="keyword">as</span> <span class="module crate_root default_library declaration library">abc</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root default_library library">alloc</span> <span class="keyword">as</span> <span class="module crate_root declaration">abc</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="unresolved_reference">unresolved</span> <span class="keyword">as</span> <span class="module crate_root declaration">definitely_unresolved</span><span class="semicolon">;</span>
</code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
index 6b049f379..bdeb09d2f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">use</span> <span class="module">inner</span><span class="operator">::</span><span class="brace">{</span><span class="self_keyword">self</span> <span class="keyword">as</span> <span class="module declaration">inner_mod</span><span class="brace">}</span><span class="semicolon">;</span>
<span class="keyword">mod</span> <span class="module declaration">inner</span> <span class="brace">{</span><span class="brace">}</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
index d9c3db6fb..f9c33b8a6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">fn</span> <span class="function declaration">fixture</span><span class="parenthesis">(</span><span class="value_param declaration reference">ra_fixture</span><span class="colon">:</span> <span class="punctuation">&</span><span class="builtin_type">str</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html
index 3900959be..2043752bc 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html
@@ -40,9 +40,10 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword crate_root public">self</span><span class="semicolon">;</span>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword crate_root">self</span><span class="semicolon">;</span>
<span class="keyword">use</span> <span class="keyword crate_root public">crate</span><span class="semicolon">;</span>
<span class="keyword">use</span> <span class="self_keyword crate_root public">self</span><span class="semicolon">;</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
index f98e0b1cd..ec39998de 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
index 2cbbf6964..06b66b302 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="module crate_root library">proc_macros</span><span class="operator">::</span><span class="macro library">mirror</span><span class="macro_bang">!</span> <span class="brace macro">{</span>
<span class="brace macro">{</span>
@@ -89,8 +90,18 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span>
<span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">concat</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">include</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">format_args</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="macro">include</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"foo/"</span><span class="comma macro">,</span> <span class="string_literal macro">"foo.rs"</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
- <span class="unresolved_reference">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello, {}!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">dont_color_me_braces</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">noop</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro macro">noop</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
-<span class="brace">}</span></code></pre> \ No newline at end of file
+<span class="brace">}</span>
+</code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html
index 8a1d69816..4dcf8e5f0 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="comment documentation">//! </span><span class="struct documentation injected intra_doc_link">[Struct]</span>
<span class="comment documentation">//! This is an intra doc injection test for modules</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html
index c4c3e3dc2..084bbf2f7 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[crate::foo::Struct]</span>
<span class="comment documentation">/// This is an intra doc injection test for modules</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
index 2369071ae..1af4bcfbd 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="numeric_literal">1</span> <span class="arithmetic">+</span> <span class="numeric_literal">1</span> <span class="arithmetic">-</span> <span class="numeric_literal">1</span> <span class="arithmetic">*</span> <span class="numeric_literal">1</span> <span class="arithmetic">/</span> <span class="numeric_literal">1</span> <span class="arithmetic">%</span> <span class="numeric_literal">1</span> <span class="bitwise">|</span> <span class="numeric_literal">1</span> <span class="bitwise">&</span> <span class="numeric_literal">1</span> <span class="logical">!</span> <span class="numeric_literal">1</span> <span class="bitwise">^</span> <span class="numeric_literal">1</span> <span class="bitwise">&gt;&gt;</span> <span class="numeric_literal">1</span> <span class="bitwise">&lt;&lt;</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
index bff35c897..ec18c3ea1 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="8121853618659664005" style="color: hsl(273,88%,88%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index fa374b04f..3ac8aa9cc 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">println</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="brace">{</span>
@@ -105,6 +106,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="char_literal">'</span><span class="escape_sequence">\x65</span><span class="char_literal">'</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="char_literal">'</span><span class="escape_sequence">\x00</span><span class="char_literal">'</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="byte_literal">b'</span><span class="escape_sequence">\xFF</span><span class="byte_literal">'</span><span class="semicolon">;</span>
+
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="escape_sequence">{{</span><span class="string_literal macro">Hello</span><span class="escape_sequence">}}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="comment">// from https://doc.rust-lang.org/std/fmt/index.html</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "Hello"</span>
@@ -159,9 +162,10 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello</span><span class="escape_sequence">\n</span><span class="string_literal macro">World"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="escape_sequence">\u{48}</span><span class="escape_sequence">\x65</span><span class="escape_sequence">\x6C</span><span class="escape_sequence">\x6C</span><span class="escape_sequence">\x6F</span><span class="string_literal macro"> World"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span>
- <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">b"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span>
- <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">r"\\"</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="invalid_escape_sequence">\xFF</span><span class="escape_sequence">\u{FF}</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span> <span class="comment">// invalid non-UTF8 escape sequences</span>
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">b"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\xFF</span><span class="invalid_escape_sequence">\u{FF}</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span> <span class="comment">// valid bytes, invalid unicodes</span>
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">c"</span><span class="escape_sequence">\u{FF}</span><span class="escape_sequence">\xFF</span><span class="string_literal">"</span><span class="semicolon">;</span> <span class="comment">// valid bytes, valid unicodes</span>
+ <span class="keyword">let</span> <span class="variable declaration reference">backslash</span> <span class="operator">=</span> <span class="string_literal">r"\\"</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="escape_sequence">\x41</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">A</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">ничоси</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">ничоси</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
@@ -173,6 +177,6 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro">assert</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="bool_literal macro">true</span><span class="comma macro">,</span> <span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> asdasd"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">toho</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">fmt"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"mov eax, </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">format_args</span><span class="operator macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="string_literal macro">"{}"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="variable macro reference">backslash</span><span class="comma macro">,</span> <span class="none macro">format_args</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="unresolved_reference macro">foo</span><span class="comma macro">,</span> <span class="string_literal macro">"bar"</span><span class="comma macro">,</span> <span class="none macro">toho</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="variable macro reference">backslash</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
index 654d51b8a..c72ea54e9 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">id</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
index 497992f68..8749d355c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
@@ -48,6 +48,7 @@ fn macros() {
check_highlighting(
r#"
//- proc_macros: mirror
+//- /lib.rs crate:lib
proc_macros::mirror! {
{
,i32 :x pub
@@ -95,11 +96,23 @@ macro without_args {
}
}
+#[rustc_builtin_macro]
+macro_rules! concat {}
+#[rustc_builtin_macro]
+macro_rules! include {}
+#[rustc_builtin_macro]
+macro_rules! format_args {}
+
+include!(concat!("foo/", "foo.rs"));
+
fn main() {
- println!("Hello, {}!", 92);
+ format_args!("Hello, {}!", 92);
dont_color_me_braces!();
noop!(noop!(1));
}
+//- /foo/foo.rs crate:foo
+mod foo {}
+use self::foo as bar;
"#,
expect_file!["./test_data/highlight_macros.html"],
false,
@@ -451,6 +464,8 @@ fn main() {
let a = '\x65';
let a = '\x00';
+ let a = b'\xFF';
+
println!("Hello {{Hello}}");
// from https://doc.rust-lang.org/std/fmt/index.html
println!("Hello"); // => "Hello"
@@ -505,9 +520,10 @@ fn main() {
println!("Hello\nWorld");
println!("\u{48}\x65\x6C\x6C\x6F World");
- let _ = "\x28\x28\x00\x63\n";
- let _ = b"\x28\x28\x00\x63\n";
- let _ = r"\\";
+ let _ = "\x28\x28\x00\x63\xFF\u{FF}\n"; // invalid non-UTF8 escape sequences
+ let _ = b"\x28\x28\x00\x63\xFF\u{FF}\n"; // valid bytes, invalid unicodes
+ let _ = c"\u{FF}\xFF"; // valid bytes, valid unicodes
+ let backslash = r"\\";
println!("{\x41}", A = 92);
println!("{ничоси}", ничоси = 92);
@@ -520,7 +536,7 @@ fn main() {
toho!("{}fmt", 0);
asm!("mov eax, {0}");
format_args!(concat!("{}"), "{}");
- format_args!("{}", format_args!("{}", 0));
+ format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash);
}"#,
expect_file!["./test_data/highlight_strings.html"],
false,
@@ -788,6 +804,7 @@ fn test_extern_crate() {
//- /main.rs crate:main deps:std,alloc
extern crate std;
extern crate alloc as abc;
+extern crate unresolved as definitely_unresolved;
//- /std/lib.rs crate:std
pub struct S;
//- /alloc/lib.rs crate:alloc
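The escape-sequence fixtures added above mirror how rustc validates escapes per literal kind. As a plain-Rust illustration of those rules (not part of the diff; the commented-out lines are the ones the new `invalid_escape_sequence` class flags), assuming a toolchain new enough for C string literals (1.77+):

```rust
fn main() {
    // Regular string literals: \xNN must be ASCII (<= 0x7F); \u{...} is allowed.
    let _ok = "\x48\u{FF}\n";
    // let _bad = "\xFF"; // error: out of range hex escape (non-UTF-8 byte)

    // Byte string literals: any \xNN byte is allowed; \u{...} is not.
    let _bytes = b"\x28\x00\xFF\n";
    // let _bad = b"\u{FF}"; // error: unicode escapes cannot be used in byte strings

    // C string literals (Rust 1.77+): both raw byte and unicode escapes are fine.
    let _c = c"\u{FF}\xFF";
}
```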
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs
new file mode 100644
index 000000000..2f6332abd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs
@@ -0,0 +1,409 @@
+use std::fmt;
+
+use hir::{Field, HirDisplay, Layout, Semantics, Type};
+use ide_db::{
+ defs::Definition,
+ helpers::{get_definition, pick_best_token},
+ RootDatabase,
+};
+use syntax::{AstNode, SyntaxKind};
+
+use crate::FilePosition;
+
+pub struct MemoryLayoutNode {
+ pub item_name: String,
+ pub typename: String,
+ pub size: u64,
+ pub alignment: u64,
+ pub offset: u64,
+ pub parent_idx: i64,
+ pub children_start: i64,
+ pub children_len: u64,
+}
+
+pub struct RecursiveMemoryLayout {
+ pub nodes: Vec<MemoryLayoutNode>,
+}
+
+// NOTE: this is currently strictly for testing and so isn't super useful as a visualization tool; however, it could be adapted to become one.

+impl fmt::Display for RecursiveMemoryLayout {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fn process(
+ fmt: &mut fmt::Formatter<'_>,
+ nodes: &Vec<MemoryLayoutNode>,
+ idx: usize,
+ depth: usize,
+ ) -> fmt::Result {
+ let mut out = "\t".repeat(depth);
+ let node = &nodes[idx];
+ out += &format!(
+ "{}: {} (size: {}, align: {}, field offset: {})\n",
+ node.item_name, node.typename, node.size, node.alignment, node.offset
+ );
+ write!(fmt, "{}", out)?;
+ if node.children_start != -1 {
+ for j in nodes[idx].children_start
+ ..(nodes[idx].children_start + nodes[idx].children_len as i64)
+ {
+ process(fmt, nodes, j as usize, depth + 1)?;
+ }
+ }
+ Ok(())
+ }
+
+ process(fmt, &self.nodes, 0, 0)
+ }
+}
+
+enum FieldOrTupleIdx {
+ Field(Field),
+ TupleIdx(usize),
+}
+
+impl FieldOrTupleIdx {
+ fn name(&self, db: &RootDatabase) -> String {
+ match *self {
+ FieldOrTupleIdx::Field(f) => f
+ .name(db)
+ .as_str()
+ .map(|s| s.to_owned())
+ .unwrap_or_else(|| format!(".{}", f.name(db).as_tuple_index().unwrap())),
+ FieldOrTupleIdx::TupleIdx(i) => format!(".{i}").to_owned(),
+ }
+ }
+
+ fn index(&self) -> usize {
+ match *self {
+ FieldOrTupleIdx::Field(f) => f.index(),
+ FieldOrTupleIdx::TupleIdx(i) => i,
+ }
+ }
+}
+
+// Feature: View Memory Layout
+//
+// Displays the recursive memory layout of a datatype.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **rust-analyzer: View Memory Layout**
+// |===
+pub(crate) fn view_memory_layout(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RecursiveMemoryLayout> {
+ let sema = Semantics::new(db);
+ let file = sema.parse(position.file_id);
+ let token =
+ pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
+ SyntaxKind::IDENT => 3,
+ _ => 0,
+ })?;
+
+ let def = get_definition(&sema, token)?;
+
+ let ty = match def {
+ Definition::Adt(it) => it.ty(db),
+ Definition::TypeAlias(it) => it.ty(db),
+ Definition::BuiltinType(it) => it.ty(db),
+ Definition::SelfType(it) => it.self_ty(db),
+ Definition::Local(it) => it.ty(db),
+ Definition::Field(it) => it.ty(db),
+ Definition::Const(it) => it.ty(db),
+ Definition::Static(it) => it.ty(db),
+ _ => return None,
+ };
+
+ fn read_layout(
+ nodes: &mut Vec<MemoryLayoutNode>,
+ db: &RootDatabase,
+ ty: &Type,
+ layout: &Layout,
+ parent_idx: usize,
+ ) {
+ let mut fields = ty
+ .fields(db)
+ .into_iter()
+ .map(|(f, ty)| (FieldOrTupleIdx::Field(f), ty))
+ .chain(
+ ty.tuple_fields(db)
+ .into_iter()
+ .enumerate()
+ .map(|(i, ty)| (FieldOrTupleIdx::TupleIdx(i), ty)),
+ )
+ .collect::<Vec<_>>();
+
+ if fields.len() == 0 {
+ return;
+ }
+
+ fields.sort_by_key(|(f, _)| layout.field_offset(f.index()).unwrap());
+
+ let children_start = nodes.len();
+ nodes[parent_idx].children_start = children_start as i64;
+ nodes[parent_idx].children_len = fields.len() as u64;
+
+ for (field, child_ty) in fields.iter() {
+ if let Ok(child_layout) = child_ty.layout(db) {
+ nodes.push(MemoryLayoutNode {
+ item_name: field.name(db),
+ typename: child_ty.display(db).to_string(),
+ size: child_layout.size(),
+ alignment: child_layout.align(),
+ offset: layout.field_offset(field.index()).unwrap_or(0),
+ parent_idx: parent_idx as i64,
+ children_start: -1,
+ children_len: 0,
+ });
+ } else {
+ nodes.push(MemoryLayoutNode {
+ item_name: field.name(db)
+ + format!("(no layout data: {:?})", child_ty.layout(db).unwrap_err())
+ .as_ref(),
+ typename: child_ty.display(db).to_string(),
+ size: 0,
+ offset: 0,
+ alignment: 0,
+ parent_idx: parent_idx as i64,
+ children_start: -1,
+ children_len: 0,
+ });
+ }
+ }
+
+ for (i, (_, child_ty)) in fields.iter().enumerate() {
+ if let Ok(child_layout) = child_ty.layout(db) {
+ read_layout(nodes, db, &child_ty, &child_layout, children_start + i);
+ }
+ }
+ }
+
+ ty.layout(db)
+ .map(|layout| {
+ let item_name = match def {
+ // def is a datatype
+ Definition::Adt(_)
+ | Definition::TypeAlias(_)
+ | Definition::BuiltinType(_)
+ | Definition::SelfType(_) => "[ROOT]".to_owned(),
+
+ // def is an item
+ def => def
+ .name(db)
+ .map(|n| {
+ n.as_str()
+ .map(|s| s.to_owned())
+ .unwrap_or_else(|| format!(".{}", n.as_tuple_index().unwrap()))
+ })
+ .unwrap_or("[ROOT]".to_owned()),
+ };
+
+ let typename = ty.display(db).to_string();
+
+ let mut nodes = vec![MemoryLayoutNode {
+ item_name,
+ typename: typename.clone(),
+ size: layout.size(),
+ offset: 0,
+ alignment: layout.align(),
+ parent_idx: -1,
+ children_start: -1,
+ children_len: 0,
+ }];
+ read_layout(&mut nodes, db, &ty, &layout, 0);
+
+ RecursiveMemoryLayout { nodes }
+ })
+ .ok()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::fixture;
+ use expect_test::expect;
+
+ fn make_memory_layout(ra_fixture: &str) -> Option<RecursiveMemoryLayout> {
+ let (analysis, position, _) = fixture::annotations(ra_fixture);
+
+ view_memory_layout(&analysis.db, position)
+ }
+
+ #[test]
+ fn view_memory_layout_none() {
+ assert!(make_memory_layout(r#"$0"#).is_none());
+ assert!(make_memory_layout(r#"stru$0ct Blah {}"#).is_none());
+ }
+
+ #[test]
+ fn view_memory_layout_primitive() {
+ expect![[r#"
+ foo: i32 (size: 4, align: 4, field offset: 0)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+fn main() {
+ let foo$0 = 109; // default i32
+}
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_constant() {
+ expect![[r#"
+ BLAH: bool (size: 1, align: 1, field offset: 0)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+const BLAH$0: bool = 0;
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_static() {
+ expect![[r#"
+ BLAH: bool (size: 1, align: 1, field offset: 0)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+static BLAH$0: bool = 0;
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_tuple() {
+ expect![[r#"
+ x: (f64, u8, i64) (size: 24, align: 8, field offset: 0)
+ .0: f64 (size: 8, align: 8, field offset: 0)
+ .1: u8 (size: 1, align: 1, field offset: 8)
+ .2: i64 (size: 8, align: 8, field offset: 16)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+fn main() {
+ let x$0 = (101.0, 111u8, 119i64);
+}
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_c_struct() {
+ expect![[r#"
+ [ROOT]: Blah (size: 16, align: 4, field offset: 0)
+ a: u32 (size: 4, align: 4, field offset: 0)
+ b: (i32, u8) (size: 8, align: 4, field offset: 4)
+ .0: i32 (size: 4, align: 4, field offset: 0)
+ .1: u8 (size: 1, align: 1, field offset: 4)
+ c: i8 (size: 1, align: 1, field offset: 12)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+#[repr(C)]
+struct Blah$0 {
+ a: u32,
+ b: (i32, u8),
+ c: i8,
+}
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_struct() {
+ expect![[r#"
+ [ROOT]: Blah (size: 16, align: 4, field offset: 0)
+ b: (i32, u8) (size: 8, align: 4, field offset: 0)
+ .0: i32 (size: 4, align: 4, field offset: 0)
+ .1: u8 (size: 1, align: 1, field offset: 4)
+ a: u32 (size: 4, align: 4, field offset: 8)
+ c: i8 (size: 1, align: 1, field offset: 12)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+struct Blah$0 {
+ a: u32,
+ b: (i32, u8),
+ c: i8,
+}
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_member() {
+ expect![[r#"
+ a: bool (size: 1, align: 1, field offset: 0)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+#[repr(C)]
+struct Oof {
+ a$0: bool,
+}
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_alias() {
+ let ml_a = make_memory_layout(
+ r#"
+struct X {
+ a: u32,
+ b: i8,
+ c: (f32, f32),
+}
+
+type Foo$0 = X;
+"#,
+ )
+ .unwrap();
+
+ let ml_b = make_memory_layout(
+ r#"
+struct X$0 {
+ a: u32,
+ b: i8,
+ c: (f32, f32),
+}
+"#,
+ )
+ .unwrap();
+
+ assert_eq!(ml_a.to_string(), ml_b.to_string());
+ }
+}
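The new `RecursiveMemoryLayout` stores the tree flattened into a single `Vec`: a node's children occupy the contiguous index range `children_start .. children_start + children_len`, and `children_start == -1` marks a leaf. A minimal stand-alone sketch of walking that encoding (the struct here is a pared-down stand-in, not the crate's type):

```rust
// Minimal stand-in for the node shape used above; only the fields needed
// for traversal are kept.
struct Node {
    item_name: String,
    children_start: i64,
    children_len: u64,
}

// Depth-first walk over the flattened tree, printing one line per node.
fn walk(nodes: &[Node], idx: usize, depth: usize) {
    let node = &nodes[idx];
    println!("{}{}", "\t".repeat(depth), node.item_name);
    if node.children_start != -1 {
        let start = node.children_start as usize;
        for child in start..start + node.children_len as usize {
            walk(nodes, child, depth + 1);
        }
    }
}

fn main() {
    // `[ROOT]` has two children stored contiguously at indices 1 and 2.
    let nodes = vec![
        Node { item_name: "[ROOT]".into(), children_start: 1, children_len: 2 },
        Node { item_name: "a".into(), children_start: -1, children_len: 0 },
        Node { item_name: "b".into(), children_start: -1, children_len: 0 },
    ];
    walk(&nodes, 0, 0);
}
```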
diff --git a/src/tools/rust-analyzer/crates/intern/Cargo.toml b/src/tools/rust-analyzer/crates/intern/Cargo.toml
index dcd0d7881..4d56c7719 100644
--- a/src/tools/rust-analyzer/crates/intern/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/intern/Cargo.toml
@@ -15,7 +15,7 @@ doctest = false
[dependencies]
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap = { version = "=5.4.0", features = ["raw-api"] }
-hashbrown = { version = "0.12.1", default-features = false }
+hashbrown.workspace = true
once_cell = "1.17.0"
rustc-hash = "1.1.0"
triomphe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml
new file mode 100644
index 000000000..f041ca88a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml
@@ -0,0 +1,25 @@
+[package]
+name = "load-cargo"
+version = "0.0.0"
+description = "TBD"
+
+rust-version.workspace = true
+edition.workspace = true
+license.workspace = true
+authors.workspace = true
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+anyhow = "1.0.62"
+crossbeam-channel = "0.5.5"
+itertools = "0.10.5"
+tracing = "0.1.35"
+
+ide.workspace = true
+ide-db.workspace = true
+proc-macro-api.workspace = true
+project-model.workspace = true
+tt.workspace = true
+vfs.workspace = true
+vfs-notify.workspace = true
diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
new file mode 100644
index 000000000..7a795dd62
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
@@ -0,0 +1,441 @@
+//! Loads a Cargo project into a static instance of analysis, without support
+//! for incorporating changes.
+// Note: don't remove any public API from this crate. This API is consumed by external tools
+// to run rust-analyzer as a library.
+use std::{collections::hash_map::Entry, mem, path::Path, sync};
+
+use ::tt::token_id as tt;
+use crossbeam_channel::{unbounded, Receiver};
+use ide::{AnalysisHost, Change, SourceRoot};
+use ide_db::{
+ base_db::{
+ CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
+ ProcMacroLoadResult, ProcMacros,
+ },
+ FxHashMap,
+};
+use itertools::Itertools;
+use proc_macro_api::{MacroDylib, ProcMacroServer};
+use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace};
+use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath};
+
+pub struct LoadCargoConfig {
+ pub load_out_dirs_from_check: bool,
+ pub with_proc_macro_server: ProcMacroServerChoice,
+ pub prefill_caches: bool,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ProcMacroServerChoice {
+ Sysroot,
+ Explicit(AbsPathBuf),
+ None,
+}
+
+pub fn load_workspace_at(
+ root: &Path,
+ cargo_config: &CargoConfig,
+ load_config: &LoadCargoConfig,
+ progress: &dyn Fn(String),
+) -> anyhow::Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
+ let root = AbsPathBuf::assert(std::env::current_dir()?.join(root));
+ let root = ProjectManifest::discover_single(&root)?;
+ let mut workspace = ProjectWorkspace::load(root, cargo_config, progress)?;
+
+ if load_config.load_out_dirs_from_check {
+ let build_scripts = workspace.run_build_scripts(cargo_config, progress)?;
+ workspace.set_build_scripts(build_scripts)
+ }
+
+ load_workspace(workspace, &cargo_config.extra_env, load_config)
+}
+
+pub fn load_workspace(
+ ws: ProjectWorkspace,
+ extra_env: &FxHashMap<String, String>,
+ load_config: &LoadCargoConfig,
+) -> anyhow::Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
+ let (sender, receiver) = unbounded();
+ let mut vfs = vfs::Vfs::default();
+ let mut loader = {
+ let loader =
+ vfs_notify::NotifyHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
+ Box::new(loader)
+ };
+
+ let proc_macro_server = match &load_config.with_proc_macro_server {
+ ProcMacroServerChoice::Sysroot => ws
+ .find_sysroot_proc_macro_srv()
+ .and_then(|it| ProcMacroServer::spawn(it).map_err(Into::into)),
+ ProcMacroServerChoice::Explicit(path) => {
+ ProcMacroServer::spawn(path.clone()).map_err(Into::into)
+ }
+ ProcMacroServerChoice::None => Err(anyhow::format_err!("proc macro server disabled")),
+ };
+
+ let (crate_graph, proc_macros) = ws.to_crate_graph(
+ &mut |path: &AbsPath| {
+ let contents = loader.load_sync(path);
+ let path = vfs::VfsPath::from(path.to_path_buf());
+ vfs.set_file_contents(path.clone(), contents);
+ vfs.file_id(&path)
+ },
+ extra_env,
+ );
+ let proc_macros = {
+ let proc_macro_server = match &proc_macro_server {
+ Ok(it) => Ok(it),
+ Err(e) => Err(e.to_string()),
+ };
+ proc_macros
+ .into_iter()
+ .map(|(crate_id, path)| {
+ (
+ crate_id,
+ path.map_or_else(
+ |_| Err("proc macro crate is missing dylib".to_owned()),
+ |(_, path)| {
+ proc_macro_server.as_ref().map_err(Clone::clone).and_then(
+ |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
+ )
+ },
+ ),
+ )
+ })
+ .collect()
+ };
+
+ let project_folders = ProjectFolders::new(&[ws], &[]);
+ loader.set_config(vfs::loader::Config {
+ load: project_folders.load,
+ watch: vec![],
+ version: 0,
+ });
+
+ let host = load_crate_graph(
+ crate_graph,
+ proc_macros,
+ project_folders.source_root_config,
+ &mut vfs,
+ &receiver,
+ );
+
+ if load_config.prefill_caches {
+ host.analysis().parallel_prime_caches(1, |_| {})?;
+ }
+ Ok((host, vfs, proc_macro_server.ok()))
+}
+
+#[derive(Default)]
+pub struct ProjectFolders {
+ pub load: Vec<vfs::loader::Entry>,
+ pub watch: Vec<usize>,
+ pub source_root_config: SourceRootConfig,
+}
+
+impl ProjectFolders {
+ pub fn new(workspaces: &[ProjectWorkspace], global_excludes: &[AbsPathBuf]) -> ProjectFolders {
+ let mut res = ProjectFolders::default();
+ let mut fsc = FileSetConfig::builder();
+ let mut local_filesets = vec![];
+
+ // Dedup source roots
+ // Depending on the project setup, we can have duplicated source roots, or for example in
+ // the case of the rustc workspace, we can end up with two source roots that are almost the
+ // same but not quite, like:
+ // PackageRoot { is_local: false, include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri")], exclude: [] }
+ // PackageRoot {
+ // is_local: true,
+ // include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri"), AbsPathBuf(".../rust/build/x86_64-pc-windows-msvc/stage0-tools/x86_64-pc-windows-msvc/release/build/cargo-miri-85801cd3d2d1dae4/out")],
+ // exclude: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri/.git"), AbsPathBuf(".../rust/src/tools/miri/cargo-miri/target")]
+ // }
+ //
+ // The first one comes from the explicit rustc workspace which points to the rustc workspace itself
+ // The second comes from the rustc workspace that we load as the actual project workspace
+ // Having `is_local` differ in this way causes problems, especially when filtering diagnostics, since we don't report diagnostics for external libraries.
+ // So we need to deduplicate these. Usually it would be enough to deduplicate by `include`, but as the rustc example above shows, that doesn't work,
+ // so we also need to coalesce the includes if they overlap.
+
+ let mut roots: Vec<_> = workspaces
+ .iter()
+ .flat_map(|ws| ws.to_roots())
+ .update(|root| root.include.sort())
+ .sorted_by(|a, b| a.include.cmp(&b.include))
+ .collect();
+
+ // map that tracks indices of overlapping roots
+ let mut overlap_map = FxHashMap::<_, Vec<_>>::default();
+ let mut done = false;
+
+ while !mem::replace(&mut done, true) {
+ // maps include paths to indices of the corresponding root
+ let mut include_to_idx = FxHashMap::default();
+ // Find and note down the indices of overlapping roots
+ for (idx, root) in roots.iter().enumerate().filter(|(_, it)| !it.include.is_empty()) {
+ for include in &root.include {
+ match include_to_idx.entry(include) {
+ Entry::Occupied(e) => {
+ overlap_map.entry(*e.get()).or_default().push(idx);
+ }
+ Entry::Vacant(e) => {
+ e.insert(idx);
+ }
+ }
+ }
+ }
+ for (k, v) in overlap_map.drain() {
+ done = false;
+ for v in v {
+ let r = mem::replace(
+ &mut roots[v],
+ PackageRoot { is_local: false, include: vec![], exclude: vec![] },
+ );
+ roots[k].is_local |= r.is_local;
+ roots[k].include.extend(r.include);
+ roots[k].exclude.extend(r.exclude);
+ }
+ roots[k].include.sort();
+ roots[k].exclude.sort();
+ roots[k].include.dedup();
+ roots[k].exclude.dedup();
+ }
+ }
+
+ for root in roots.into_iter().filter(|it| !it.include.is_empty()) {
+ let file_set_roots: Vec<VfsPath> =
+ root.include.iter().cloned().map(VfsPath::from).collect();
+
+ let entry = {
+ let mut dirs = vfs::loader::Directories::default();
+ dirs.extensions.push("rs".into());
+ dirs.include.extend(root.include);
+ dirs.exclude.extend(root.exclude);
+ for excl in global_excludes {
+ if dirs
+ .include
+ .iter()
+ .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
+ {
+ dirs.exclude.push(excl.clone());
+ }
+ }
+
+ vfs::loader::Entry::Directories(dirs)
+ };
+
+ if root.is_local {
+ res.watch.push(res.load.len());
+ }
+ res.load.push(entry);
+
+ if root.is_local {
+ local_filesets.push(fsc.len());
+ }
+ fsc.add_file_set(file_set_roots)
+ }
+
+ let fsc = fsc.build();
+ res.source_root_config = SourceRootConfig { fsc, local_filesets };
+
+ res
+ }
+}
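The deduplication loop above keeps merging any root that shares an `include` path with an earlier root until no overlaps remain. A simplified sketch of the same fixed-point coalescing over plain `Vec<String>` include lists (illustrative only; the real code also merges `exclude` lists and the `is_local` flag):

```rust
use std::collections::{hash_map::Entry, HashMap};

// Merge roots whose `include` lists overlap, repeating until a fixed point
// is reached (one merge can create new overlaps).
fn coalesce(mut roots: Vec<Vec<String>>) -> Vec<Vec<String>> {
    let mut done = false;
    while !std::mem::replace(&mut done, true) {
        let mut include_to_idx: HashMap<String, usize> = HashMap::new();
        let mut overlap: HashMap<usize, Vec<usize>> = HashMap::new();
        // Note which roots share an include path with an earlier root.
        for (idx, root) in roots.iter().enumerate().filter(|(_, r)| !r.is_empty()) {
            for include in root {
                match include_to_idx.entry(include.clone()) {
                    Entry::Occupied(e) => overlap.entry(*e.get()).or_default().push(idx),
                    Entry::Vacant(e) => {
                        e.insert(idx);
                    }
                }
            }
        }
        // Fold every overlapping root into the first one that claimed the path.
        for (k, vs) in overlap {
            done = false;
            for v in vs {
                let merged = std::mem::take(&mut roots[v]);
                roots[k].extend(merged);
            }
            roots[k].sort();
            roots[k].dedup();
        }
    }
    roots.into_iter().filter(|r| !r.is_empty()).collect()
}

fn main() {
    let roots = vec![
        vec!["a".to_string(), "b".to_string()],
        vec!["b".to_string(), "c".to_string()],
        vec!["d".to_string()],
    ];
    // The first two roots share "b" and collapse into ["a", "b", "c"].
    assert_eq!(
        coalesce(roots),
        vec![vec!["a".to_string(), "b".to_string(), "c".to_string()], vec!["d".to_string()]]
    );
}
```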
+
+#[derive(Default, Debug)]
+pub struct SourceRootConfig {
+ pub fsc: FileSetConfig,
+ pub local_filesets: Vec<usize>,
+}
+
+impl SourceRootConfig {
+ pub fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
+ self.fsc
+ .partition(vfs)
+ .into_iter()
+ .enumerate()
+ .map(|(idx, file_set)| {
+ let is_local = self.local_filesets.contains(&idx);
+ if is_local {
+ SourceRoot::new_local(file_set)
+ } else {
+ SourceRoot::new_library(file_set)
+ }
+ })
+ .collect()
+ }
+}
+
+/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
+/// with an identity dummy expander.
+pub fn load_proc_macro(
+ server: &ProcMacroServer,
+ path: &AbsPath,
+ dummy_replace: &[Box<str>],
+) -> ProcMacroLoadResult {
+ let res: Result<Vec<_>, String> = (|| {
+ let dylib = MacroDylib::new(path.to_path_buf());
+ let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
+ if vec.is_empty() {
+ return Err("proc macro library returned no proc macros".to_string());
+ }
+ Ok(vec
+ .into_iter()
+ .map(|expander| expander_to_proc_macro(expander, dummy_replace))
+ .collect())
+ })();
+ match res {
+ Ok(proc_macros) => {
+ tracing::info!(
+ "Loaded proc-macros for {path}: {:?}",
+ proc_macros.iter().map(|it| it.name.clone()).collect::<Vec<_>>()
+ );
+ Ok(proc_macros)
+ }
+ Err(e) => {
+ tracing::warn!("proc-macro loading for {path} failed: {e}");
+ Err(e)
+ }
+ }
+}
+
+fn load_crate_graph(
+ crate_graph: CrateGraph,
+ proc_macros: ProcMacros,
+ source_root_config: SourceRootConfig,
+ vfs: &mut vfs::Vfs,
+ receiver: &Receiver<vfs::loader::Message>,
+) -> AnalysisHost {
+ let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
+ let mut host = AnalysisHost::new(lru_cap);
+ let mut analysis_change = Change::new();
+
+ host.raw_database_mut().enable_proc_attr_macros();
+
+ // wait until Vfs has loaded all roots
+ for task in receiver {
+ match task {
+ vfs::loader::Message::Progress { n_done, n_total, config_version: _ } => {
+ if n_done == n_total {
+ break;
+ }
+ }
+ vfs::loader::Message::Loaded { files } => {
+ for (path, contents) in files {
+ vfs.set_file_contents(path.into(), contents);
+ }
+ }
+ }
+ }
+ let changes = vfs.take_changes();
+ for file in changes {
+ if file.exists() {
+ let contents = vfs.file_contents(file.file_id);
+ if let Ok(text) = std::str::from_utf8(contents) {
+ analysis_change.change_file(file.file_id, Some(text.into()))
+ }
+ }
+ }
+ let source_roots = source_root_config.partition(vfs);
+ analysis_change.set_roots(source_roots);
+
+ analysis_change.set_crate_graph(crate_graph);
+ analysis_change.set_proc_macros(proc_macros);
+
+ host.apply_change(analysis_change);
+ host
+}
+
+fn expander_to_proc_macro(
+ expander: proc_macro_api::ProcMacro,
+ dummy_replace: &[Box<str>],
+) -> ProcMacro {
+ let name = From::from(expander.name());
+ let kind = match expander.kind() {
+ proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
+ proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
+ proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
+ };
+ let expander: sync::Arc<dyn ProcMacroExpander> =
+ if dummy_replace.iter().any(|replace| &**replace == name) {
+ match kind {
+ ProcMacroKind::Attr => sync::Arc::new(IdentityExpander),
+ _ => sync::Arc::new(EmptyExpander),
+ }
+ } else {
+ sync::Arc::new(Expander(expander))
+ };
+ ProcMacro { name, kind, expander }
+}
+
+#[derive(Debug)]
+struct Expander(proc_macro_api::ProcMacro);
+
+impl ProcMacroExpander for Expander {
+ fn expand(
+ &self,
+ subtree: &tt::Subtree,
+ attrs: Option<&tt::Subtree>,
+ env: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
+ match self.0.expand(subtree, attrs, env) {
+ Ok(Ok(subtree)) => Ok(subtree),
+ Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
+ Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
+ }
+ }
+}
+
+/// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user.
+#[derive(Debug)]
+struct IdentityExpander;
+
+impl ProcMacroExpander for IdentityExpander {
+ fn expand(
+ &self,
+ subtree: &tt::Subtree,
+ _: Option<&tt::Subtree>,
+ _: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ Ok(subtree.clone())
+ }
+}
+
+/// Empty expander, used for proc-macros that are deliberately ignored by the user.
+#[derive(Debug)]
+struct EmptyExpander;
+
+impl ProcMacroExpander for EmptyExpander {
+ fn expand(
+ &self,
+ _: &tt::Subtree,
+ _: Option<&tt::Subtree>,
+ _: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ Ok(tt::Subtree::empty())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::SourceDatabase;
+
+ use super::*;
+
+ #[test]
+ fn test_loading_rust_analyzer() {
+ let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
+ let cargo_config = CargoConfig::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: false,
+ with_proc_macro_server: ProcMacroServerChoice::None,
+ prefill_caches: false,
+ };
+ let (host, _vfs, _proc_macro) =
+ load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap();
+
+ let n_crates = host.raw_database().crate_graph().iter().count();
+ // RA has quite a few crates, but the exact count doesn't matter
+ assert!(n_crates > 20);
+ }
+}
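The test above doubles as a usage example for the new crate. A minimal sketch of driving it as a library, assuming the API exactly as introduced in this diff (`LoadCargoConfig`, `ProcMacroServerChoice`, `load_workspace_at`) and the `anyhow`/`ide_db` dependencies it already declares:

```rust
use std::path::Path;

use ide_db::base_db::SourceDatabase; // for `crate_graph()`, as in the test above
use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
use project_model::CargoConfig;

fn main() -> anyhow::Result<()> {
    let cargo_config = CargoConfig::default();
    let load_config = LoadCargoConfig {
        load_out_dirs_from_check: false,
        with_proc_macro_server: ProcMacroServerChoice::None,
        prefill_caches: false,
    };
    // Load the project rooted at "." into a static AnalysisHost snapshot.
    let (host, _vfs, _proc_macro_server) =
        load_workspace_at(Path::new("."), &cargo_config, &load_config, &|msg| {
            eprintln!("{msg}");
        })?;
    let crate_count = host.raw_database().crate_graph().iter().count();
    println!("loaded {crate_count} crates");
    Ok(())
}
```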
diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
index d28dd17de..9d43e1304 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
@@ -20,10 +20,7 @@ fn benchmark_parse_macro_rules() {
let rules = macro_rules_fixtures_tt();
let hash: usize = {
let _pt = bench("mbe parse macro rules");
- rules
- .values()
- .map(|it| DeclarativeMacro::parse_macro_rules(it, true).unwrap().rules.len())
- .sum()
+ rules.values().map(|it| DeclarativeMacro::parse_macro_rules(it, true).rules.len()).sum()
};
assert_eq!(hash, 1144);
}
@@ -41,7 +38,7 @@ fn benchmark_expand_macro_rules() {
invocations
.into_iter()
.map(|(id, tt)| {
- let res = rules[&id].expand(&tt);
+ let res = rules[&id].expand(tt);
assert!(res.err.is_none());
res.value.token_trees.len()
})
@@ -53,7 +50,7 @@ fn benchmark_expand_macro_rules() {
fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
macro_rules_fixtures_tt()
.into_iter()
- .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true).unwrap()))
+ .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
.collect()
}
@@ -105,7 +102,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
for op in rule.lhs.iter() {
collect_from_op(op, &mut subtree, &mut seed);
}
- if it.expand(&subtree).err.is_none() {
+ if it.expand(subtree.clone()).err.is_none() {
res.push((name.clone(), subtree));
break;
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
index 8e2181e97..f2d89d3ef 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
@@ -123,4 +123,14 @@ enum Fragment {
/// proc-macro delimiter=none. As we later discovered, "none" delimiters are
/// tricky to handle in the parser, and rustc doesn't handle those either.
Expr(tt::TokenTree),
+ /// There are roughly two types of paths: paths in expression context, where a
+ /// separator `::` between an identifier and its following generic argument list
+ /// is mandatory, and paths in type context, where `::` can be omitted.
+ ///
+ /// Unlike rustc, we need to transform the parsed fragments back into tokens
+ /// during transcription. When the matched path fragment is a type-context path
+ /// and is transcribed as an expression-context path, verbatim transcription
+ /// would cause a syntax error. We need to fix it up just before transcribing;
+ /// see `transcriber::fix_up_and_push_path_tt()`.
+ Path(tt::TokenTree),
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
index 474826079..1471af98b 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
@@ -742,7 +742,11 @@ fn match_meta_var(
is_2021: bool,
) -> ExpandResult<Option<Fragment>> {
let fragment = match kind {
- MetaVarKind::Path => parser::PrefixEntryPoint::Path,
+ MetaVarKind::Path => {
+ return input
+ .expect_fragment(parser::PrefixEntryPoint::Path)
+ .map(|it| it.map(Fragment::Path));
+ }
MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop,
MetaVarKind::Pat => parser::PrefixEntryPoint::Pat,
@@ -771,7 +775,7 @@ fn match_meta_var(
.expect_fragment(parser::PrefixEntryPoint::Expr)
.map(|tt| tt.map(Fragment::Expr));
}
- _ => {
+ MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
let tt_result = match kind {
MetaVarKind::Ident => input
.expect_ident()
@@ -799,7 +803,7 @@ fn match_meta_var(
})
.map_err(|()| ExpandError::binding_error("expected literal"))
}
- _ => Err(ExpandError::UnexpectedToken),
+ _ => unreachable!(),
};
return tt_result.map(|it| Some(Fragment::Tokens(it))).into();
}
@@ -884,7 +888,7 @@ impl<'a> Iterator for OpDelimitedIter<'a> {
}
}
-impl<'a> TtIter<'a> {
+impl TtIter<'_> {
fn expect_separator(&mut self, separator: &Separator) -> bool {
let mut fork = self.clone();
let ok = match separator {
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
index 6161af185..cdac2f1e3 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
@@ -400,7 +400,8 @@ fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
}
buf.push(tt.into())
}
- Fragment::Tokens(tt) | Fragment::Expr(tt) => buf.push(tt),
+ Fragment::Path(tt::TokenTree::Subtree(tt)) => fix_up_and_push_path_tt(buf, tt),
+ Fragment::Tokens(tt) | Fragment::Expr(tt) | Fragment::Path(tt) => buf.push(tt),
}
}
@@ -411,6 +412,45 @@ fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
}
}
+/// Inserts the path separator `::` between an identifier and its following generic
+/// argument list, and then pushes into the buffer. See [`Fragment::Path`] for why
+/// we need this fixup.
+fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
+ stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible));
+ let mut prev_was_ident = false;
+ // Note that we only need to fix up the top-level `TokenTree`s because the
+ // context of the paths in the descendant `Subtree`s won't be changed by the
+ // mbe transcription.
+ for tt in subtree.token_trees {
+ if prev_was_ident {
+ // Pedantically, `(T) -> U` in `FnOnce(T) -> U` is treated as a generic
+ // argument list and thus needs `::` between it and `FnOnce`. However in
+ // today's Rust this type of path *semantically* cannot appear as a
+ // top-level expression-context path, so we can safely ignore it.
+ if let tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '<', .. })) = tt {
+ buf.push(
+ tt::Leaf::Punct(tt::Punct {
+ char: ':',
+ spacing: tt::Spacing::Joint,
+ span: tt::Span::unspecified(),
+ })
+ .into(),
+ );
+ buf.push(
+ tt::Leaf::Punct(tt::Punct {
+ char: ':',
+ spacing: tt::Spacing::Alone,
+ span: tt::Span::unspecified(),
+ })
+ .into(),
+ );
+ }
+ }
+ prev_was_ident = matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Ident(_)));
+ buf.push(tt);
+ }
+}
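The fixup itself is purely token-level: whenever a top-level identifier is immediately followed by `<`, a `::` is pushed in between. A tiny illustrative sketch over plain string tokens (not the real `tt` types):

```rust
// Insert "::" between an identifier and a following "<", mirroring the
// top-level-only fixup performed by `fix_up_and_push_path_tt`.
fn fix_up_path_tokens(tokens: &[&str]) -> Vec<String> {
    let mut out: Vec<String> = Vec::new();
    let mut prev_was_ident = false;
    for &tok in tokens {
        if prev_was_ident && tok == "<" {
            out.push("::".to_string());
        }
        // Crude identifier check, good enough for this illustration.
        prev_was_ident = tok.chars().all(|c| c.is_alphanumeric() || c == '_')
            && tok.chars().next().map_or(false, |c| !c.is_ascii_digit());
        out.push(tok.to_string());
    }
    out
}

fn main() {
    // `Wrapper<i32>` written as a type-context path...
    let fixed = fix_up_path_tokens(&["Wrapper", "<", "i32", ">"]);
    // ...becomes the expression-context form `Wrapper::<i32>`.
    assert_eq!(fixed, ["Wrapper", "::", "<", "i32", ">"]);
}
```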
+
/// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth
/// defined by the metavar expression.
fn count(
diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
index 5ef20ff8a..9d886a1c9 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
@@ -28,13 +28,13 @@ use crate::{
tt_iter::TtIter,
};
-// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
pub use self::tt::{Delimiter, DelimiterKind, Punct};
pub use ::parser::TopEntryPoint;
pub use crate::{
syntax_bridge::{
- parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+ parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_map,
+ syntax_node_to_token_map_with_modifications, syntax_node_to_token_tree,
syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken,
SyntheticTokenId,
},
@@ -131,6 +131,7 @@ pub struct DeclarativeMacro {
// This is used for correctly determining the behavior of the pat fragment
// FIXME: This should be tracked by hygiene of the fragment identifier!
is_2021: bool,
+ err: Option<Box<ParseError>>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -206,81 +207,118 @@ impl Shift {
}
}
-#[derive(Debug, Eq, PartialEq)]
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Origin {
Def,
Call,
}
impl DeclarativeMacro {
+ pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro {
+ DeclarativeMacro {
+ rules: Box::default(),
+ shift: Shift(0),
+ is_2021,
+ err: Some(Box::new(err)),
+ }
+ }
+
/// The old, `macro_rules! m {}` flavor.
- pub fn parse_macro_rules(
- tt: &tt::Subtree,
- is_2021: bool,
- ) -> Result<DeclarativeMacro, ParseError> {
+ pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro {
// Note: this parsing can be implemented using mbe machinery itself, by
// matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
// manually seems easier.
let mut src = TtIter::new(tt);
let mut rules = Vec::new();
+ let mut err = None;
+
while src.len() > 0 {
- let rule = Rule::parse(&mut src, true)?;
+ let rule = match Rule::parse(&mut src, true) {
+ Ok(it) => it,
+ Err(e) => {
+ err = Some(Box::new(e));
+ break;
+ }
+ };
rules.push(rule);
if let Err(()) = src.expect_char(';') {
if src.len() > 0 {
- return Err(ParseError::expected("expected `;`"));
+ err = Some(Box::new(ParseError::expected("expected `;`")));
}
break;
}
}
for Rule { lhs, .. } in &rules {
- validate(lhs)?;
+ if let Err(e) = validate(lhs) {
+ err = Some(Box::new(e));
+ break;
+ }
}
- Ok(DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021 })
+ DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
}
/// The new, unstable `macro m {}` flavor.
- pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> Result<DeclarativeMacro, ParseError> {
+ pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro {
let mut src = TtIter::new(tt);
let mut rules = Vec::new();
+ let mut err = None;
if tt::DelimiterKind::Brace == tt.delimiter.kind {
cov_mark::hit!(parse_macro_def_rules);
while src.len() > 0 {
- let rule = Rule::parse(&mut src, true)?;
+ let rule = match Rule::parse(&mut src, true) {
+ Ok(it) => it,
+ Err(e) => {
+ err = Some(Box::new(e));
+ break;
+ }
+ };
rules.push(rule);
if let Err(()) = src.expect_any_char(&[';', ',']) {
if src.len() > 0 {
- return Err(ParseError::expected("expected `;` or `,` to delimit rules"));
+ err = Some(Box::new(ParseError::expected(
+ "expected `;` or `,` to delimit rules",
+ )));
}
break;
}
}
} else {
cov_mark::hit!(parse_macro_def_simple);
- let rule = Rule::parse(&mut src, false)?;
- if src.len() != 0 {
- return Err(ParseError::expected("remaining tokens in macro def"));
+ match Rule::parse(&mut src, false) {
+ Ok(rule) => {
+ if src.len() != 0 {
+ err = Some(Box::new(ParseError::expected("remaining tokens in macro def")));
+ }
+ rules.push(rule);
+ }
+ Err(e) => {
+ err = Some(Box::new(e));
+ }
}
- rules.push(rule);
}
for Rule { lhs, .. } in &rules {
- validate(lhs)?;
+ if let Err(e) = validate(lhs) {
+ err = Some(Box::new(e));
+ break;
+ }
}
- Ok(DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021 })
+ DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
}
- pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
- // apply shift
- let mut tt = tt.clone();
+ pub fn expand(&self, mut tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
self.shift.shift_all(&mut tt);
expander::expand_rules(&self.rules, &tt, self.is_2021)
}
+ pub fn err(&self) -> Option<&ParseError> {
+ self.err.as_deref()
+ }
+
pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
self.shift.shift(id)
}
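After this change, parsing never fails outright: callers always get a `DeclarativeMacro`, query `err()` for the first parse error, and pass the invocation subtree to `expand()` by value. A hedged sketch of the new calling convention, assuming the `mbe`/`tt` APIs exactly as shown in this diff (not runnable outside rust-analyzer's workspace):

```rust
use mbe::DeclarativeMacro;

// `definition_tt` is the `tt::Subtree` of the `macro_rules!` body and
// `invocation_tt` is the (owned) token tree of one call site.
fn expand_call(definition_tt: &tt::Subtree, invocation_tt: tt::Subtree) -> Option<tt::Subtree> {
    let mac = DeclarativeMacro::parse_macro_rules(definition_tt, /* is_2021 */ true);
    if let Some(err) = mac.err() {
        // The macro is still usable; the parse error becomes a diagnostic instead.
        eprintln!("macro_rules! parse error: {err:?}");
    }
    let result = mac.expand(invocation_tt); // by value since this change
    result.err.is_none().then_some(result.value)
}
```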
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
index 8cbf0f8fc..62b2accf5 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
@@ -53,6 +53,37 @@ pub fn syntax_node_to_token_tree_with_modifications(
(subtree, c.id_alloc.map, c.id_alloc.next_id)
}
+/// Convert the syntax node to a `TokenMap`, recording the text range
+/// of each allocated token id.
+pub fn syntax_node_to_token_map(node: &SyntaxNode) -> TokenMap {
+ syntax_node_to_token_map_with_modifications(
+ node,
+ Default::default(),
+ 0,
+ Default::default(),
+ Default::default(),
+ )
+ .0
+}
+
+/// Convert the syntax node to a `TokenMap`
+/// with the censored range excluded.
+pub fn syntax_node_to_token_map_with_modifications(
+ node: &SyntaxNode,
+ existing_token_map: TokenMap,
+ next_id: u32,
+ replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+) -> (TokenMap, u32) {
+ let global_offset = node.text_range().start();
+ let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append);
+ collect_tokens(&mut c);
+ c.id_alloc.map.shrink_to_fit();
+ always!(c.replace.is_empty(), "replace: {:?}", c.replace);
+ always!(c.append.is_empty(), "append: {:?}", c.append);
+ (c.id_alloc.map, c.id_alloc.next_id)
+}
+
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct SyntheticTokenId(pub u32);
@@ -327,6 +358,111 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
}
}
+fn collect_tokens<C: TokenConverter>(conv: &mut C) {
+ struct StackEntry {
+ idx: usize,
+ open_range: TextRange,
+ delimiter: tt::DelimiterKind,
+ }
+
+ let entry = StackEntry {
+ delimiter: tt::DelimiterKind::Invisible,
+ // never used (delimiter is `None`)
+ idx: !0,
+ open_range: TextRange::empty(TextSize::of('.')),
+ };
+ let mut stack = NonEmptyVec::new(entry);
+
+ loop {
+ let StackEntry { delimiter, .. } = stack.last_mut();
+ let (token, range) = match conv.bump() {
+ Some(it) => it,
+ None => break,
+ };
+ let synth_id = token.synthetic_id(conv);
+
+ let kind = token.kind(conv);
+ if kind == COMMENT {
+ // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can
+ // figure out which token id to use for the doc comment, if it is converted successfully.
+ let next_id = conv.id_alloc().peek_next_id();
+ if let Some(_tokens) = conv.convert_doc_comment(&token, next_id) {
+ let id = conv.id_alloc().alloc(range, synth_id);
+ debug_assert_eq!(id, next_id);
+ }
+ continue;
+ }
+ if kind.is_punct() && kind != UNDERSCORE {
+ if synth_id.is_none() {
+ assert_eq!(range.len(), TextSize::of('.'));
+ }
+
+ let expected = match delimiter {
+ tt::DelimiterKind::Parenthesis => Some(T![')']),
+ tt::DelimiterKind::Brace => Some(T!['}']),
+ tt::DelimiterKind::Bracket => Some(T![']']),
+ tt::DelimiterKind::Invisible => None,
+ };
+
+ if let Some(expected) = expected {
+ if kind == expected {
+ if let Some(entry) = stack.pop() {
+ conv.id_alloc().close_delim(entry.idx, Some(range));
+ }
+ continue;
+ }
+ }
+
+ let delim = match kind {
+ T!['('] => Some(tt::DelimiterKind::Parenthesis),
+ T!['{'] => Some(tt::DelimiterKind::Brace),
+ T!['['] => Some(tt::DelimiterKind::Bracket),
+ _ => None,
+ };
+
+ if let Some(kind) = delim {
+ let (_id, idx) = conv.id_alloc().open_delim(range, synth_id);
+
+ stack.push(StackEntry { idx, open_range: range, delimiter: kind });
+ continue;
+ }
+
+ conv.id_alloc().alloc(range, synth_id);
+ } else {
+ macro_rules! make_leaf {
+ ($i:ident) => {{
+ conv.id_alloc().alloc(range, synth_id);
+ }};
+ }
+ match kind {
+ T![true] | T![false] => make_leaf!(Ident),
+ IDENT => make_leaf!(Ident),
+ UNDERSCORE => make_leaf!(Ident),
+ k if k.is_keyword() => make_leaf!(Ident),
+ k if k.is_literal() => make_leaf!(Literal),
+ LIFETIME_IDENT => {
+ let char_unit = TextSize::of('\'');
+ let r = TextRange::at(range.start(), char_unit);
+ conv.id_alloc().alloc(r, synth_id);
+
+ let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
+ conv.id_alloc().alloc(r, synth_id);
+ continue;
+ }
+ _ => continue,
+ };
+ };
+
+ // If we get here, we've consumed all input tokens.
+ // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
+ // Merge them so we're left with one.
+ while let Some(entry) = stack.pop() {
+ conv.id_alloc().close_delim(entry.idx, None);
+ conv.id_alloc().alloc(entry.open_range, None);
+ }
+ }
+}
+
fn is_single_token_op(kind: SyntaxKind) -> bool {
matches!(
kind,
@@ -509,12 +645,12 @@ trait TokenConverter: Sized {
fn id_alloc(&mut self) -> &mut TokenIdAlloc;
}
-impl<'a> SrcToken<RawConverter<'a>> for usize {
- fn kind(&self, ctx: &RawConverter<'a>) -> SyntaxKind {
+impl SrcToken<RawConverter<'_>> for usize {
+ fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind {
ctx.lexed.kind(*self)
}
- fn to_char(&self, ctx: &RawConverter<'a>) -> Option<char> {
+ fn to_char(&self, ctx: &RawConverter<'_>) -> Option<char> {
ctx.lexed.text(*self).chars().next()
}
@@ -522,12 +658,12 @@ impl<'a> SrcToken<RawConverter<'a>> for usize {
ctx.lexed.text(*self).into()
}
- fn synthetic_id(&self, _ctx: &RawConverter<'a>) -> Option<SyntheticTokenId> {
+ fn synthetic_id(&self, _ctx: &RawConverter<'_>) -> Option<SyntheticTokenId> {
None
}
}
-impl<'a> TokenConverter for RawConverter<'a> {
+impl TokenConverter for RawConverter<'_> {
type Token = usize;
fn convert_doc_comment(&self, &token: &usize, span: tt::TokenId) -> Option<Vec<tt::TokenTree>> {
@@ -800,7 +936,7 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
Some(&texts[idx..texts.len() - (1 - idx)])
}
-impl<'a> TtTreeSink<'a> {
+impl TtTreeSink<'_> {
/// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
/// This occurs when a float literal is used as a field access.
fn float_split(&mut self, has_pseudo_dot: bool) {
diff --git a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
index c923e7a69..73a27df5d 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
@@ -110,4 +110,15 @@ impl TokenMap {
// FIXME: This could be accidentally quadratic
self.entries.remove(idx);
}
+
+ pub fn entries(&self) -> impl Iterator<Item = (tt::TokenId, TextRange)> + '_ {
+ self.entries.iter().filter_map(|&(tid, tr)| match tr {
+ TokenTextRange::Token(range) => Some((tid, range)),
+ TokenTextRange::Delimiter(_) => None,
+ })
+ }
+
+ pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) {
+ self.entries.retain(|&(tid, _)| id(tid));
+ }
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
index 59dbf1568..79ff8ca28 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
@@ -197,4 +197,4 @@ impl<'a> Iterator for TtIter<'a> {
}
}
-impl<'a> std::iter::ExactSizeIterator for TtIter<'a> {}
+impl std::iter::ExactSizeIterator for TtIter<'_> {}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar.rs b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
index 1814e0e54..333318f53 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
@@ -165,6 +165,40 @@ pub(crate) mod entry {
}
m.complete(p, ERROR);
}
+
+ pub(crate) fn eager_macro_input(p: &mut Parser<'_>) {
+ let m = p.start();
+
+ let closing_paren_kind = match p.current() {
+ T!['{'] => T!['}'],
+ T!['('] => T![')'],
+ T!['['] => T![']'],
+ _ => {
+ p.error("expected `{`, `[`, `(`");
+ while !p.at(EOF) {
+ p.bump_any();
+ }
+ m.complete(p, ERROR);
+ return;
+ }
+ };
+ p.bump_any();
+ while !p.at(EOF) && !p.at(closing_paren_kind) {
+ expressions::expr(p);
+ if !p.at(EOF) && !p.at(closing_paren_kind) {
+ p.expect(T![,]);
+ }
+ }
+ p.expect(closing_paren_kind);
+ if p.at(EOF) {
+ m.complete(p, MACRO_EAGER_INPUT);
+ return;
+ }
+ while !p.at(EOF) {
+ p.bump_any();
+ }
+ m.complete(p, ERROR);
+ }
}
}
@@ -211,70 +245,54 @@ impl BlockLike {
const VISIBILITY_FIRST: TokenSet = TokenSet::new(&[T![pub], T![crate]]);
fn opt_visibility(p: &mut Parser<'_>, in_tuple_field: bool) -> bool {
- match p.current() {
- T![pub] => {
- let m = p.start();
- p.bump(T![pub]);
- if p.at(T!['(']) {
- match p.nth(1) {
- // test crate_visibility
- // pub(crate) struct S;
- // pub(self) struct S;
- // pub(super) struct S;
-
- // test_err crate_visibility_empty_recover
- // pub() struct S;
-
- // test pub_parens_typepath
- // struct B(pub (super::A));
- // struct B(pub (crate::A,));
- T![crate] | T![self] | T![super] | T![ident] | T![')'] if p.nth(2) != T![:] => {
- // If we are in a tuple struct, then the parens following `pub`
- // might be an tuple field, not part of the visibility. So in that
- // case we don't want to consume an identifier.
-
- // test pub_tuple_field
- // struct MyStruct(pub (u32, u32));
- // struct MyStruct(pub (u32));
- // struct MyStruct(pub ());
- if !(in_tuple_field && matches!(p.nth(1), T![ident] | T![')'])) {
- p.bump(T!['(']);
- paths::use_path(p);
- p.expect(T![')']);
- }
- }
- // test crate_visibility_in
- // pub(in super::A) struct S;
- // pub(in crate) struct S;
- T![in] => {
- p.bump(T!['(']);
- p.bump(T![in]);
- paths::use_path(p);
- p.expect(T![')']);
- }
- _ => {}
+ if !p.at(T![pub]) {
+ return false;
+ }
+
+ let m = p.start();
+ p.bump(T![pub]);
+ if p.at(T!['(']) {
+ match p.nth(1) {
+ // test crate_visibility
+ // pub(crate) struct S;
+ // pub(self) struct S;
+ // pub(super) struct S;
+
+ // test_err crate_visibility_empty_recover
+ // pub() struct S;
+
+ // test pub_parens_typepath
+ // struct B(pub (super::A));
+ // struct B(pub (crate::A,));
+ T![crate] | T![self] | T![super] | T![ident] | T![')'] if p.nth(2) != T![:] => {
+ // If we are in a tuple struct, then the parens following `pub`
+ // might be a tuple field, not part of the visibility. So in that
+ // case we don't want to consume an identifier.
+
+ // test pub_tuple_field
+ // struct MyStruct(pub (u32, u32));
+ // struct MyStruct(pub (u32));
+ // struct MyStruct(pub ());
+ if !(in_tuple_field && matches!(p.nth(1), T![ident] | T![')'])) {
+ p.bump(T!['(']);
+ paths::use_path(p);
+ p.expect(T![')']);
}
}
- m.complete(p, VISIBILITY);
- true
- }
- // test crate_keyword_vis
- // crate fn main() { }
- // struct S { crate field: u32 }
- // struct T(crate u32);
- T![crate] => {
- if p.nth_at(1, T![::]) {
- // test crate_keyword_path
- // fn foo() { crate::foo(); }
- return false;
+ // test crate_visibility_in
+ // pub(in super::A) struct S;
+ // pub(in crate) struct S;
+ T![in] => {
+ p.bump(T!['(']);
+ p.bump(T![in]);
+ paths::use_path(p);
+ p.expect(T![')']);
}
- let m = p.start();
- p.bump(T![crate]);
- m.complete(p, VISIBILITY);
- true
+ _ => {}
}
- _ => false,
}
+ m.complete(p, VISIBILITY);
+ true
}
fn opt_rename(p: &mut Parser<'_>) {
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs
index e589b6993..211af98e6 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs
@@ -32,6 +32,9 @@ const GENERIC_ARG_FIRST: TokenSet = TokenSet::new(&[
])
.union(types::TYPE_FIRST);
+// Despite its name, it can also be used for generic param lists.
+const GENERIC_ARG_RECOVERY_SET: TokenSet = TokenSet::new(&[T![>], T![,]]);
+
// test generic_arg
// type T = S<i32>;
fn generic_arg(p: &mut Parser<'_>) -> bool {
@@ -55,6 +58,15 @@ fn generic_arg(p: &mut Parser<'_>) -> bool {
// test assoc_type_eq
// type T = StreamingIterator<Item<'a> = &'a T>;
types::type_(p);
+ } else if p.at_ts(GENERIC_ARG_RECOVERY_SET) {
+ // Although `const_arg()` recovers as expected, we want to
+ // handle those here to give the following message because
+ // we don't know whether this associated item is a type or
+ // const at this point.
+
+ // test_err recover_from_missing_assoc_item_binding
+ // fn f() -> impl Iterator<Item = , Item = > {}
+ p.error("missing associated item binding");
} else {
// test assoc_const_eq
// fn foo<F: Foo<N=3>>() {}
@@ -141,12 +153,17 @@ pub(super) fn const_arg_expr(p: &mut Parser<'_>) {
expressions::literal(p);
lm.complete(p, PREFIX_EXPR);
}
- _ => {
+ _ if paths::is_use_path_start(p) => {
// This shouldn't be hit by `const_arg`
let lm = p.start();
paths::use_path(p);
lm.complete(p, PATH_EXPR);
}
+ _ => {
+ // test_err recover_from_missing_const_default
+ // struct A<const N: i32 = , const M: i32 =>;
+ p.err_recover("expected a generic const argument", GENERIC_ARG_RECOVERY_SET);
+ }
}
}
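
Both recovery paths added above share one idea: when the next token is one that closes or continues the surrounding generic list (`>` or `,`), report an error without consuming it, so the enclosing list can still be parsed. A minimal sketch of that idea, independent of the rust-analyzer parser types:

```rust
// Tokens that terminate or continue a generic argument list. On a missing
// argument the parser reports an error but leaves these tokens in place so
// parsing can resynchronize.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Tok {
    Gt,
    Comma,
    Ident,
}

const GENERIC_ARG_RECOVERY_SET: &[Tok] = &[Tok::Gt, Tok::Comma];

fn main() {
    // `Item = ,` -> the `,` is in the recovery set, so error and continue.
    assert!(GENERIC_ARG_RECOVERY_SET.contains(&Tok::Comma));
    // An identifier is a real argument, not a recovery point.
    assert!(!GENERIC_ARG_RECOVERY_SET.contains(&Tok::Ident));
}
```
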
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs
index 7fcf938ba..8ed1c84c4 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs
@@ -79,10 +79,9 @@ fn const_param(p: &mut Parser<'_>, m: Marker) {
p.error("missing type for const parameter");
}
- if p.at(T![=]) {
+ if p.eat(T![=]) {
// test const_param_default_literal
// struct A<const N: i32 = -1>;
- p.bump(T![=]);
// test const_param_default_expression
// struct A<const N: i32 = { 1 }>;
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
index 1c056819f..4e850b1f7 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
@@ -328,9 +328,6 @@ fn macro_rules(p: &mut Parser<'_>, m: Marker) {
p.bump_remap(T![macro_rules]);
p.expect(T![!]);
- if p.at(IDENT) {
- name(p);
- }
// Special-case `macro_rules! try`.
// This is a hack until we do proper edition support
@@ -340,6 +337,8 @@ fn macro_rules(p: &mut Parser<'_>, m: Marker) {
let m = p.start();
p.bump_remap(IDENT);
m.complete(p, NAME);
+ } else {
+ name(p);
}
match p.current() {
diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs
index 1aba1f767..c155e8aaf 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs
@@ -75,6 +75,8 @@ pub enum TopEntryPoint {
/// Edge case -- macros generally don't expand to attributes, with the
/// exception of `cfg_attr` which does!
MetaItem,
+ /// Edge case 2 -- eager macros expand their input to a delimited list of comma-separated expressions
+ MacroEagerInput,
}
impl TopEntryPoint {
@@ -87,6 +89,7 @@ impl TopEntryPoint {
TopEntryPoint::Type => grammar::entry::top::type_,
TopEntryPoint::Expr => grammar::entry::top::expr,
TopEntryPoint::MetaItem => grammar::entry::top::meta_item,
+ TopEntryPoint::MacroEagerInput => grammar::entry::top::eager_macro_input,
};
let mut p = parser::Parser::new(input);
entry_point(&mut p);
diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
index 5cdb39700..53cdad649 100644
--- a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
@@ -24,7 +24,7 @@ pub enum StrStep<'a> {
Error { msg: &'a str, pos: usize },
}
-impl<'a> LexedStr<'a> {
+impl LexedStr<'_> {
pub fn to_input(&self) -> crate::Input {
let mut res = crate::Input::default();
let mut was_joint = false;
@@ -223,7 +223,8 @@ fn n_attached_trivias<'a>(
) -> usize {
match kind {
CONST | ENUM | FN | IMPL | MACRO_CALL | MACRO_DEF | MACRO_RULES | MODULE | RECORD_FIELD
- | STATIC | STRUCT | TRAIT | TUPLE_FIELD | TYPE_ALIAS | UNION | USE | VARIANT => {
+ | STATIC | STRUCT | TRAIT | TUPLE_FIELD | TYPE_ALIAS | UNION | USE | VARIANT
+ | EXTERN_CRATE => {
let mut res = 0;
let mut trivias = trivias.enumerate().peekable();
diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
index a8fbcfacf..48f407623 100644
--- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
@@ -262,6 +262,7 @@ pub enum SyntaxKind {
TYPE_BOUND_LIST,
MACRO_ITEMS,
MACRO_STMTS,
+ MACRO_EAGER_INPUT,
#[doc(hidden)]
__LAST,
}
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
index 11f9c34ab..2f3c7febc 100644
--- a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
@@ -6,7 +6,6 @@ fn vis() {
check(PrefixEntryPoint::Vis, "fn foo() {}", "");
check(PrefixEntryPoint::Vis, "pub(fn foo() {}", "pub");
check(PrefixEntryPoint::Vis, "pub(crate fn foo() {}", "pub(crate");
- check(PrefixEntryPoint::Vis, "crate fn foo() {}", "crate");
}
#[test]
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rast
new file mode 100644
index 000000000..fc59db84e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rast
@@ -0,0 +1,48 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ COMMA ","
+ WHITESPACE " "
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ R_ANGLE ">"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 30: missing associated item binding
+error 39: missing associated item binding
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rs
new file mode 100644
index 000000000..e484e433a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rs
@@ -0,0 +1 @@
+fn f() -> impl Iterator<Item = , Item = > {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rast
new file mode 100644
index 000000000..809ad1b8d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rast
@@ -0,0 +1,44 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "N"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ COMMA ","
+ WHITESPACE " "
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "M"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 23: expected a generic const argument
+error 40: expected a generic const argument
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rs
new file mode 100644
index 000000000..5bab13da9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rs
@@ -0,0 +1 @@
+struct A<const N: i32 = , const M: i32 =>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast
deleted file mode 100644
index 07b0210e4..000000000
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast
+++ /dev/null
@@ -1,63 +0,0 @@
-SOURCE_FILE
- FN
- VISIBILITY
- CRATE_KW "crate"
- WHITESPACE " "
- FN_KW "fn"
- WHITESPACE " "
- NAME
- IDENT "main"
- PARAM_LIST
- L_PAREN "("
- R_PAREN ")"
- WHITESPACE " "
- BLOCK_EXPR
- STMT_LIST
- L_CURLY "{"
- WHITESPACE " "
- R_CURLY "}"
- WHITESPACE "\n"
- STRUCT
- STRUCT_KW "struct"
- WHITESPACE " "
- NAME
- IDENT "S"
- WHITESPACE " "
- RECORD_FIELD_LIST
- L_CURLY "{"
- WHITESPACE " "
- RECORD_FIELD
- VISIBILITY
- CRATE_KW "crate"
- WHITESPACE " "
- NAME
- IDENT "field"
- COLON ":"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "u32"
- WHITESPACE " "
- R_CURLY "}"
- WHITESPACE "\n"
- STRUCT
- STRUCT_KW "struct"
- WHITESPACE " "
- NAME
- IDENT "T"
- TUPLE_FIELD_LIST
- L_PAREN "("
- TUPLE_FIELD
- VISIBILITY
- CRATE_KW "crate"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "u32"
- R_PAREN ")"
- SEMICOLON ";"
- WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs
deleted file mode 100644
index e2b5f2161..000000000
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-crate fn main() { }
-struct S { crate field: u32 }
-struct T(crate u32);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast
deleted file mode 100644
index 8d9b61630..000000000
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast
+++ /dev/null
@@ -1,33 +0,0 @@
-SOURCE_FILE
- FN
- FN_KW "fn"
- WHITESPACE " "
- NAME
- IDENT "foo"
- PARAM_LIST
- L_PAREN "("
- R_PAREN ")"
- WHITESPACE " "
- BLOCK_EXPR
- STMT_LIST
- L_CURLY "{"
- WHITESPACE " "
- EXPR_STMT
- CALL_EXPR
- PATH_EXPR
- PATH
- PATH
- PATH_SEGMENT
- NAME_REF
- CRATE_KW "crate"
- COLON2 "::"
- PATH_SEGMENT
- NAME_REF
- IDENT "foo"
- ARG_LIST
- L_PAREN "("
- R_PAREN ")"
- SEMICOLON ";"
- WHITESPACE " "
- R_CURLY "}"
- WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs
deleted file mode 100644
index 0f454d121..000000000
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs
+++ /dev/null
@@ -1 +0,0 @@
-fn foo() { crate::foo(); }
diff --git a/src/tools/rust-analyzer/crates/paths/src/lib.rs b/src/tools/rust-analyzer/crates/paths/src/lib.rs
index e0c20a414..88b8d0aee 100644
--- a/src/tools/rust-analyzer/crates/paths/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/paths/src/lib.rs
@@ -6,7 +6,7 @@
use std::{
borrow::Borrow,
ffi::OsStr,
- ops,
+ fmt, ops,
path::{Component, Path, PathBuf},
};
@@ -95,6 +95,12 @@ impl AbsPathBuf {
}
}
+impl fmt::Display for AbsPathBuf {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.0.display(), f)
+ }
+}
+
/// Wrapper around an absolute [`Path`].
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
#[repr(transparent)]
@@ -217,6 +223,7 @@ impl AbsPath {
pub fn as_os_str(&self) -> &OsStr {
self.0.as_os_str()
}
+ #[deprecated(note = "use Display instead")]
pub fn display(&self) -> std::path::Display<'_> {
self.0.display()
}
@@ -227,6 +234,12 @@ impl AbsPath {
// endregion:delegate-methods
}
+impl fmt::Display for AbsPath {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.0.display(), f)
+ }
+}
+
/// Wrapper around a relative [`PathBuf`].
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct RelPathBuf(PathBuf);
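
The new `Display` impls follow a common newtype pattern: delegate to `Path::display()` in one place so call sites can use the type directly in format strings instead of calling `.display()` everywhere. A self-contained sketch of the same pattern:

```rust
use std::{fmt, path::PathBuf};

// Minimal stand-in for a path newtype like `AbsPathBuf`.
struct PathWrapper(PathBuf);

impl fmt::Display for PathWrapper {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to `Path::display()`, exactly as the impls above do.
        fmt::Display::fmt(&self.0.display(), f)
    }
}

fn main() {
    let manifest = PathWrapper(PathBuf::from("/tmp/project/Cargo.toml"));
    // No `.display()` needed at the call site any more.
    println!("bad manifest path: {manifest}");
}
```
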
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
index d3486e755..4229f2891 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
@@ -12,7 +12,7 @@ rust-version.workspace = true
doctest = false
[dependencies]
-object = { version = "0.30.2", default-features = false, features = [
+object = { version = "0.32.0", default-features = false, features = [
"std",
"read_core",
"elf",
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
index 13f67a012..48efbf589 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
@@ -135,7 +135,12 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
}
};
- let mut snappy_decoder = SnapDecoder::new(snappy_portion);
+ let mut uncompressed: Box<dyn Read> = if &snappy_portion[0..4] == b"rust" {
+ // Not compressed.
+ Box::new(snappy_portion)
+ } else {
+ Box::new(SnapDecoder::new(snappy_portion))
+ };
// the bytes before version string bytes, so this basically is:
// 8 bytes for [b'r',b'u',b's',b't',0,0,0,5]
@@ -144,11 +149,11 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
// so 13 bytes in total, and we should check the 13th byte
// to know the length
let mut bytes_before_version = [0u8; 13];
- snappy_decoder.read_exact(&mut bytes_before_version)?;
+ uncompressed.read_exact(&mut bytes_before_version)?;
let length = bytes_before_version[12];
let mut version_string_utf8 = vec![0u8; length as usize];
- snappy_decoder.read_exact(&mut version_string_utf8)?;
+ uncompressed.read_exact(&mut version_string_utf8)?;
let version_string = String::from_utf8(version_string_utf8);
version_string.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}
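
The change above lets the version reader handle both compressed and uncompressed `.rustc` sections: if the payload already starts with the `rust` magic bytes it is read directly, otherwise it goes through the snappy decoder. A simplified, hedged sketch of the decode step, assuming the `snap` crate (which the surrounding code aliases as `SnapDecoder`) and the header layout described in the comments above:

```rust
use std::io::{self, Read};

use snap::read::FrameDecoder;

// Extract the rustc version string from a `.rustc` section payload.
fn read_version(section: &[u8]) -> io::Result<String> {
    let mut reader: Box<dyn Read + '_> = if section.starts_with(b"rust") {
        // Not compressed: read the bytes directly.
        Box::new(section)
    } else {
        Box::new(FrameDecoder::new(section))
    };

    // 13 header bytes precede the version string; the 13th is its length.
    let mut header = [0u8; 13];
    reader.read_exact(&mut header)?;
    let len = header[12] as usize;

    let mut version = vec![0u8; len];
    reader.read_exact(&mut version)?;
    String::from_utf8(version).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}
```
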
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
index d5eb157bf..99993f16e 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
@@ -12,14 +12,14 @@ rust-version.workspace = true
doctest = false
[dependencies]
-object = { version = "0.30.2", default-features = false, features = [
+object = { version = "0.32.0", default-features = false, features = [
"std",
"read_core",
"elf",
"macho",
"pe",
] }
-libloading = "0.7.3"
+libloading = "0.8.0"
memmap2 = "0.5.4"
stdx.workspace = true
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs
index 1980d4c78..fe18451d3 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs
@@ -17,7 +17,10 @@ use token_stream::TokenStreamBuilder;
mod symbol;
pub use symbol::*;
-use std::ops::{Bound, Range};
+use std::{
+ iter,
+ ops::{Bound, Range},
+};
use crate::tt;
@@ -80,9 +83,7 @@ impl server::TokenStream for RustAnalyzer {
stream.is_empty()
}
fn from_str(&mut self, src: &str) -> Self::TokenStream {
- use std::str::FromStr;
-
- Self::TokenStream::from_str(src).expect("cannot parse string")
+ src.parse().expect("cannot parse string")
}
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
@@ -101,7 +102,7 @@ impl server::TokenStream for RustAnalyzer {
},
};
let tree = TokenTree::from(group);
- Self::TokenStream::from_iter(vec![tree])
+ Self::TokenStream::from_iter(iter::once(tree))
}
bridge::TokenTree::Ident(ident) => {
@@ -111,7 +112,7 @@ impl server::TokenStream for RustAnalyzer {
let ident: tt::Ident = tt::Ident { text, span: ident.span };
let leaf = tt::Leaf::from(ident);
let tree = TokenTree::from(leaf);
- Self::TokenStream::from_iter(vec![tree])
+ Self::TokenStream::from_iter(iter::once(tree))
}
bridge::TokenTree::Literal(literal) => {
@@ -123,7 +124,7 @@ impl server::TokenStream for RustAnalyzer {
let literal = tt::Literal { text, span: literal.0.span };
let leaf = tt::Leaf::from(literal);
let tree = TokenTree::from(leaf);
- Self::TokenStream::from_iter(vec![tree])
+ Self::TokenStream::from_iter(iter::once(tree))
}
bridge::TokenTree::Punct(p) => {
@@ -134,7 +135,7 @@ impl server::TokenStream for RustAnalyzer {
};
let leaf = tt::Leaf::from(punct);
let tree = TokenTree::from(leaf);
- Self::TokenStream::from_iter(vec![tree])
+ Self::TokenStream::from_iter(iter::once(tree))
}
}
}
@@ -355,12 +356,12 @@ impl server::Server for RustAnalyzer {
}
fn intern_symbol(ident: &str) -> Self::Symbol {
- // FIXME: should be self.interner once the proc-macro api allows is
+ // FIXME: should be `self.interner` once the proc-macro api allows it.
Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident))
}
fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
- // FIXME: should be self.interner once the proc-macro api allows is
+ // FIXME: should be `self.interner` once the proc-macro api allows it.
f(symbol.text(&SYMBOL_INTERNER).as_str())
}
}
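
The `from_iter(iter::once(..))` changes above avoid allocating a one-element `Vec` just to build a single-item stream. The same pattern in plain std:

```rust
use std::iter;

fn main() {
    // Before: a temporary Vec was built just to hand one element to `from_iter`.
    let v1 = Vec::from_iter(vec![42]);
    // After: `iter::once` yields the single element without the extra Vec.
    let v2 = Vec::from_iter(iter::once(42));
    assert_eq!(v1, v2);
}
```
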
diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml
index 602e74275..937834a82 100644
--- a/src/tools/rust-analyzer/crates/profile/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml
@@ -15,7 +15,7 @@ doctest = false
once_cell = "1.17.0"
cfg-if = "1.0.0"
libc = "0.2.135"
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+la-arena.workspace = true
countme = { version = "3.0.1", features = ["enable"] }
jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true }
diff --git a/src/tools/rust-analyzer/crates/profile/src/tree.rs b/src/tools/rust-analyzer/crates/profile/src/tree.rs
index 62f0c30b5..1290fba36 100644
--- a/src/tools/rust-analyzer/crates/profile/src/tree.rs
+++ b/src/tools/rust-analyzer/crates/profile/src/tree.rs
@@ -72,7 +72,7 @@ struct NodeIter<'a, T> {
next: Option<Idx<T>>,
}
-impl<'a, T> Iterator for NodeIter<'a, T> {
+impl<T> Iterator for NodeIter<'_, T> {
type Item = Idx<T>;
fn next(&mut self) -> Option<Idx<T>> {
diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
index 3abff64a8..75977fc5b 100644
--- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
@@ -20,7 +20,7 @@ serde_json.workspace = true
serde.workspace = true
triomphe.workspace = true
anyhow = "1.0.62"
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+la-arena.workspace = true
itertools = "0.10.5"
# local deps
diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
index 6cbf403cb..fb0f3ab7d 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
@@ -225,9 +225,8 @@ impl WorkspaceBuildScripts {
let package_build_data = &mut res[idx].outputs[package];
if !package_build_data.is_unchanged() {
tracing::info!(
- "{}: {:?}",
- workspace[package].manifest.parent().display(),
- package_build_data,
+ "{}: {package_build_data:?}",
+ workspace[package].manifest.parent(),
);
}
}
@@ -270,9 +269,8 @@ impl WorkspaceBuildScripts {
let package_build_data = &outputs[package];
if !package_build_data.is_unchanged() {
tracing::info!(
- "{}: {:?}",
- workspace[package].manifest.parent().display(),
- package_build_data,
+ "{}: {package_build_data:?}",
+ workspace[package].manifest.parent(),
);
}
}
@@ -424,7 +422,7 @@ impl WorkspaceBuildScripts {
let target_libdir = AbsPathBuf::try_from(PathBuf::from(target_libdir))
.map_err(|_| anyhow::format_err!("target-libdir was not an absolute path"))?;
- tracing::info!("Loading rustc proc-macro paths from {}", target_libdir.display());
+ tracing::info!("Loading rustc proc-macro paths from {target_libdir}");
let proc_macro_dylibs: Vec<(String, AbsPathBuf)> = std::fs::read_dir(target_libdir)?
.filter_map(|entry| {
@@ -458,9 +456,8 @@ impl WorkspaceBuildScripts {
let package_build_data = &bs.outputs[package];
if !package_build_data.is_unchanged() {
tracing::info!(
- "{}: {:?}",
- rustc[package].manifest.parent().display(),
- package_build_data,
+ "{}: {package_build_data:?}",
+ rustc[package].manifest.parent(),
);
}
}
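
Most hunks in this file (and in the project-model changes that follow) make the same two mechanical edits: positional format arguments become captured identifiers, and `.display()` calls disappear thanks to the new `Display` impls. A tiny illustration of the formatting change alone:

```rust
fn main() {
    let manifest = "/tmp/project/Cargo.toml";
    let outputs = vec!["OUT_DIR=/tmp/out"];
    // Before: println!("{}: {:?}", manifest, outputs);
    // After: captured identifiers (Rust 1.58+) inline the arguments.
    println!("{manifest}: {outputs:?}");
}
```
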
diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
index 92b454150..e47808a2c 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
@@ -4,7 +4,7 @@ use std::path::PathBuf;
use std::str::from_utf8;
use std::{ops, process::Command};
-use anyhow::{Context, Result};
+use anyhow::Context;
use base_db::Edition;
use cargo_metadata::{CargoOpt, MetadataCommand};
use la_arena::{Arena, Idx};
@@ -145,7 +145,7 @@ pub struct PackageDependency {
pub kind: DepKind,
}
-#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Ord)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DepKind {
/// Available to the library, binary, and dev targets in the package (but not the build script).
Normal,
@@ -156,23 +156,20 @@ pub enum DepKind {
}
impl DepKind {
- fn iter(list: &[cargo_metadata::DepKindInfo]) -> impl Iterator<Item = Self> + '_ {
- let mut dep_kinds = Vec::new();
+ fn iter(list: &[cargo_metadata::DepKindInfo]) -> impl Iterator<Item = Self> {
+ let mut dep_kinds = [None; 3];
if list.is_empty() {
- dep_kinds.push(Self::Normal);
+ dep_kinds[0] = Some(Self::Normal);
}
for info in list {
- let kind = match info.kind {
- cargo_metadata::DependencyKind::Normal => Self::Normal,
- cargo_metadata::DependencyKind::Development => Self::Dev,
- cargo_metadata::DependencyKind::Build => Self::Build,
+ match info.kind {
+ cargo_metadata::DependencyKind::Normal => dep_kinds[0] = Some(Self::Normal),
+ cargo_metadata::DependencyKind::Development => dep_kinds[1] = Some(Self::Dev),
+ cargo_metadata::DependencyKind::Build => dep_kinds[2] = Some(Self::Build),
cargo_metadata::DependencyKind::Unknown => continue,
- };
- dep_kinds.push(kind);
+ }
}
- dep_kinds.sort_unstable();
- dep_kinds.dedup();
- dep_kinds.into_iter()
+ dep_kinds.into_iter().flatten()
}
}
@@ -236,7 +233,7 @@ impl CargoWorkspace {
current_dir: &AbsPath,
config: &CargoConfig,
progress: &dyn Fn(String),
- ) -> Result<cargo_metadata::Metadata> {
+ ) -> anyhow::Result<cargo_metadata::Metadata> {
let targets = find_list_of_build_targets(config, cargo_toml);
let mut meta = MetadataCommand::new();
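
The rewritten `DepKind::iter` above replaces the allocate-sort-dedup approach with three fixed slots, one per kind, which deduplicates for free and keeps the output order stable. A standalone sketch of that pattern (not the exact rust-analyzer code):

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum DepKind {
    Normal,
    Dev,
    Build,
}

// One slot per kind: setting the same slot twice deduplicates automatically,
// and iteration order is fixed (Normal, Dev, Build).
fn dedup_kinds(kinds: &[DepKind]) -> impl Iterator<Item = DepKind> {
    let mut slots = [None; 3];
    if kinds.is_empty() {
        slots[0] = Some(DepKind::Normal);
    }
    for &kind in kinds {
        slots[kind as usize] = Some(kind);
    }
    slots.into_iter().flatten()
}

fn main() {
    let kinds: Vec<_> = dedup_kinds(&[DepKind::Dev, DepKind::Normal, DepKind::Dev]).collect();
    assert_eq!(kinds, [DepKind::Normal, DepKind::Dev]);
}
```
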
diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
index 61acc646f..901dcfd2b 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
@@ -31,12 +31,13 @@ pub mod target_data_layout;
mod tests;
use std::{
+ fmt,
fs::{self, read_dir, ReadDir},
io,
process::Command,
};
-use anyhow::{bail, format_err, Context, Result};
+use anyhow::{bail, format_err, Context};
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashSet;
@@ -59,19 +60,19 @@ pub enum ProjectManifest {
}
impl ProjectManifest {
- pub fn from_manifest_file(path: AbsPathBuf) -> Result<ProjectManifest> {
+ pub fn from_manifest_file(path: AbsPathBuf) -> anyhow::Result<ProjectManifest> {
let path = ManifestPath::try_from(path)
- .map_err(|path| format_err!("bad manifest path: {}", path.display()))?;
+ .map_err(|path| format_err!("bad manifest path: {path}"))?;
if path.file_name().unwrap_or_default() == "rust-project.json" {
return Ok(ProjectManifest::ProjectJson(path));
}
if path.file_name().unwrap_or_default() == "Cargo.toml" {
return Ok(ProjectManifest::CargoToml(path));
}
- bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display());
+ bail!("project root must point to Cargo.toml or rust-project.json: {path}");
}
- pub fn discover_single(path: &AbsPath) -> Result<ProjectManifest> {
+ pub fn discover_single(path: &AbsPath) -> anyhow::Result<ProjectManifest> {
let mut candidates = ProjectManifest::discover(path)?;
let res = match candidates.pop() {
None => bail!("no projects"),
@@ -145,7 +146,17 @@ impl ProjectManifest {
}
}
-fn utf8_stdout(mut cmd: Command) -> Result<String> {
+impl fmt::Display for ProjectManifest {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ProjectManifest::ProjectJson(it) | ProjectManifest::CargoToml(it) => {
+ fmt::Display::fmt(&it, f)
+ }
+ }
+ }
+}
+
+fn utf8_stdout(mut cmd: Command) -> anyhow::Result<String> {
let output = cmd.output().with_context(|| format!("{cmd:?} failed"))?;
if !output.status.success() {
match String::from_utf8(output.stderr) {
diff --git a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs
index 3f60e4dd9..490e1a4ea 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs
@@ -1,5 +1,5 @@
//! See [`ManifestPath`].
-use std::{ops, path::Path};
+use std::{fmt, ops, path::Path};
use paths::{AbsPath, AbsPathBuf};
@@ -40,6 +40,12 @@ impl ManifestPath {
}
}
+impl fmt::Display for ManifestPath {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.file, f)
+ }
+}
+
impl ops::Deref for ManifestPath {
type Target = AbsPath;
diff --git a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs
index 0066f6717..8392718b2 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs
@@ -2,7 +2,6 @@
use std::process::Command;
-use anyhow::Result;
use rustc_hash::FxHashMap;
use crate::{cfg_flag::CfgFlag, utf8_stdout, ManifestPath};
@@ -23,6 +22,9 @@ pub(crate) fn get(
}
}
+ // Add the `miri` cfg, which is useful for MIR eval in the stdlib.
+ res.push(CfgFlag::Atom("miri".into()));
+
match get_rust_cfgs(cargo_toml, target, extra_env) {
Ok(rustc_cfgs) => {
tracing::debug!(
@@ -44,7 +46,7 @@ fn get_rust_cfgs(
cargo_toml: Option<&ManifestPath>,
target: Option<&str>,
extra_env: &FxHashMap<String, String>,
-) -> Result<String> {
+) -> anyhow::Result<String> {
if let Some(cargo_toml) = cargo_toml {
let mut cargo_config = Command::new(toolchain::cargo());
cargo_config.envs(extra_env);
diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
index e3a2de927..da862c9e8 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
@@ -85,9 +85,8 @@ impl Sysroot {
" try running `rustup component add rust-src` to possible fix this"
};
Some(format!(
- "could not find libcore in loaded sysroot at `{}`{}",
- self.src_root.as_path().display(),
- var_note,
+ "could not find libcore in loaded sysroot at `{}`{var_note}",
+ self.src_root.as_path(),
))
} else {
None
@@ -99,7 +98,7 @@ impl Sysroot {
impl Sysroot {
/// Attempts to discover the toolchain's sysroot from the given `dir`.
pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Result<Sysroot> {
- tracing::debug!("discovering sysroot for {}", dir.display());
+ tracing::debug!("discovering sysroot for {dir}");
let sysroot_dir = discover_sysroot_dir(dir, extra_env)?;
let sysroot_src_dir =
discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?;
@@ -111,7 +110,7 @@ impl Sysroot {
extra_env: &FxHashMap<String, String>,
src: AbsPathBuf,
) -> Result<Sysroot> {
- tracing::debug!("discovering sysroot for {}", current_dir.display());
+ tracing::debug!("discovering sysroot for {current_dir}");
let sysroot_dir = discover_sysroot_dir(current_dir, extra_env)?;
Ok(Sysroot::load(sysroot_dir, src))
}
@@ -122,7 +121,7 @@ impl Sysroot {
pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf) -> Result<Sysroot> {
let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| {
- format_err!("can't load standard library from sysroot path {}", sysroot_dir.display())
+ format_err!("can't load standard library from sysroot path {sysroot_dir}")
})?;
Ok(Sysroot::load(sysroot_dir, sysroot_src_dir))
}
@@ -220,10 +219,10 @@ fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> {
if let Ok(path) = AbsPathBuf::try_from(path.as_str()) {
let core = path.join("core");
if fs::metadata(&core).is_ok() {
- tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {}", path.display());
+ tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {path}");
return Some(path);
}
- tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core);
+ tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {core:?}), ignoring");
} else {
tracing::debug!("RUST_SRC_PATH is set, but is invalid, ignoring");
}
@@ -250,10 +249,9 @@ fn discover_sysroot_src_dir_or_add_component(
format_err!(
"\
can't load standard library from sysroot
-{}
+{sysroot_path}
(discovered via `rustc --print sysroot`)
try installing the Rust source the same way you installed rustc",
- sysroot_path.display(),
)
})
}
@@ -261,7 +259,7 @@ try installing the Rust source the same way you installed rustc",
fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> {
let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml");
let rustc_src = ManifestPath::try_from(rustc_src).ok()?;
- tracing::debug!("checking for rustc source code: {}", rustc_src.display());
+ tracing::debug!("checking for rustc source code: {rustc_src}");
if fs::metadata(&rustc_src).is_ok() {
Some(rustc_src)
} else {
@@ -271,7 +269,7 @@ fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> {
fn get_rust_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> {
let rust_src = sysroot_path.join("lib/rustlib/src/rust/library");
- tracing::debug!("checking sysroot library: {}", rust_src.display());
+ tracing::debug!("checking sysroot library: {rust_src}");
if fs::metadata(&rust_src).is_ok() {
Some(rust_src)
} else {
diff --git a/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs b/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs
index 30ca7b348..cb995857e 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs
@@ -1,7 +1,6 @@
//! Runs `rustc --print target-spec-json` to get the target_data_layout.
use std::process::Command;
-use anyhow::Result;
use rustc_hash::FxHashMap;
use crate::{utf8_stdout, ManifestPath};
@@ -10,7 +9,7 @@ pub fn get(
cargo_toml: Option<&ManifestPath>,
target: Option<&str>,
extra_env: &FxHashMap<String, String>,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let output = (|| {
if let Some(cargo_toml) = cargo_toml {
let mut cmd = Command::new(toolchain::rustc());
diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
index b5fe237fc..f51ea7eeb 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
@@ -4,7 +4,7 @@
use std::{collections::VecDeque, fmt, fs, process::Command, sync};
-use anyhow::{format_err, Context, Result};
+use anyhow::{format_err, Context};
use base_db::{
CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env,
FileId, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, TargetLayoutLoadResult,
@@ -151,7 +151,16 @@ impl ProjectWorkspace {
manifest: ProjectManifest,
config: &CargoConfig,
progress: &dyn Fn(String),
- ) -> Result<ProjectWorkspace> {
+ ) -> anyhow::Result<ProjectWorkspace> {
+ ProjectWorkspace::load_inner(&manifest, config, progress)
+ .with_context(|| format!("Failed to load the project at {manifest}"))
+ }
+
+ fn load_inner(
+ manifest: &ProjectManifest,
+ config: &CargoConfig,
+ progress: &dyn Fn(String),
+ ) -> anyhow::Result<ProjectWorkspace> {
let version = |current_dir, cmd_path, prefix: &str| {
let cargo_version = utf8_stdout({
let mut cmd = Command::new(cmd_path);
@@ -167,12 +176,10 @@ impl ProjectWorkspace {
};
let res = match manifest {
ProjectManifest::ProjectJson(project_json) => {
- let file = fs::read_to_string(&project_json).with_context(|| {
- format!("Failed to read json file {}", project_json.display())
- })?;
- let data = serde_json::from_str(&file).with_context(|| {
- format!("Failed to deserialize json file {}", project_json.display())
- })?;
+ let file = fs::read_to_string(&project_json)
+ .with_context(|| format!("Failed to read json file {project_json}"))?;
+ let data = serde_json::from_str(&file)
+ .with_context(|| format!("Failed to deserialize json file {project_json}"))?;
let project_location = project_json.parent().to_path_buf();
let toolchain = version(&*project_location, toolchain::rustc(), "rustc ")?;
let project_json = ProjectJson::new(&project_location, data);
@@ -193,9 +200,7 @@ impl ProjectWorkspace {
)
.with_context(|| {
format!(
- "Failed to read Cargo metadata from Cargo.toml file {}, {:?}",
- cargo_toml.display(),
- toolchain
+ "Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}",
)
})?;
let cargo = CargoWorkspace::new(meta);
@@ -203,12 +208,12 @@ impl ProjectWorkspace {
let sysroot = match (&config.sysroot, &config.sysroot_src) {
(Some(RustLibSource::Path(path)), None) => {
Sysroot::with_sysroot_dir(path.clone()).map_err(|e| {
- Some(format!("Failed to find sysroot at {}:{e}", path.display()))
+ Some(format!("Failed to find sysroot at {path}:{e}"))
})
}
(Some(RustLibSource::Discover), None) => {
Sysroot::discover(cargo_toml.parent(), &config.extra_env).map_err(|e| {
- Some(format!("Failed to find sysroot for Cargo.toml file {}. Is rust-src installed? {e}", cargo_toml.display()))
+ Some(format!("Failed to find sysroot for Cargo.toml file {cargo_toml}. Is rust-src installed? {e}"))
})
}
(Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => {
@@ -220,21 +225,19 @@ impl ProjectWorkspace {
&config.extra_env,
sysroot_src.clone(),
).map_err(|e| {
- Some(format!("Failed to find sysroot for Cargo.toml file {}. Is rust-src installed? {e}", cargo_toml.display()))
+ Some(format!("Failed to find sysroot for Cargo.toml file {cargo_toml}. Is rust-src installed? {e}"))
})
}
(None, _) => Err(None),
};
if let Ok(sysroot) = &sysroot {
- tracing::info!(workspace = %cargo_toml.display(), src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
+ tracing::info!(workspace = %cargo_toml, src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot");
}
let rustc_dir = match &config.rustc_source {
Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
- .map_err(|p| {
- Some(format!("rustc source path is not absolute: {}", p.display()))
- }),
+ .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))),
Some(RustLibSource::Discover) => {
sysroot.as_ref().ok().and_then(Sysroot::discover_rustc).ok_or_else(|| {
Some(format!("Failed to discover rustc source for sysroot."))
@@ -244,7 +247,7 @@ impl ProjectWorkspace {
};
let rustc = rustc_dir.and_then(|rustc_dir| {
- tracing::info!(workspace = %cargo_toml.display(), rustc_dir = %rustc_dir.display(), "Using rustc source");
+ tracing::info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source");
match CargoWorkspace::fetch_metadata(
&rustc_dir,
cargo_toml.parent(),
@@ -266,13 +269,11 @@ impl ProjectWorkspace {
Err(e) => {
tracing::error!(
%e,
- "Failed to read Cargo metadata from rustc source at {}",
- rustc_dir.display()
+ "Failed to read Cargo metadata from rustc source at {rustc_dir}",
);
Err(Some(format!(
- "Failed to read Cargo metadata from rustc source at {}: {e}",
- rustc_dir.display())
- ))
+ "Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}"
+ )))
}
}
});
@@ -330,7 +331,7 @@ impl ProjectWorkspace {
(None, None) => Err(None),
};
if let Ok(sysroot) = &sysroot {
- tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
+ tracing::info!(src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot");
}
let rustc_cfg = rustc_cfg::get(None, target, extra_env);
@@ -340,26 +341,23 @@ impl ProjectWorkspace {
pub fn load_detached_files(
detached_files: Vec<AbsPathBuf>,
config: &CargoConfig,
- ) -> Result<ProjectWorkspace> {
+ ) -> anyhow::Result<ProjectWorkspace> {
let sysroot = match &config.sysroot {
Some(RustLibSource::Path(path)) => Sysroot::with_sysroot_dir(path.clone())
- .map_err(|e| Some(format!("Failed to find sysroot at {}:{e}", path.display()))),
+ .map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}"))),
Some(RustLibSource::Discover) => {
let dir = &detached_files
.first()
.and_then(|it| it.parent())
.ok_or_else(|| format_err!("No detached files to load"))?;
Sysroot::discover(dir, &config.extra_env).map_err(|e| {
- Some(format!(
- "Failed to find sysroot for {}. Is rust-src installed? {e}",
- dir.display()
- ))
+ Some(format!("Failed to find sysroot for {dir}. Is rust-src installed? {e}"))
})
}
None => Err(None),
};
if let Ok(sysroot) = &sysroot {
- tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
+ tracing::info!(src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot");
}
let rustc_cfg = rustc_cfg::get(None, None, &Default::default());
Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
@@ -370,15 +368,12 @@ impl ProjectWorkspace {
&self,
config: &CargoConfig,
progress: &dyn Fn(String),
- ) -> Result<WorkspaceBuildScripts> {
+ ) -> anyhow::Result<WorkspaceBuildScripts> {
match self {
ProjectWorkspace::Cargo { cargo, toolchain, .. } => {
WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, toolchain)
.with_context(|| {
- format!(
- "Failed to run build scripts for {}",
- &cargo.workspace_root().display()
- )
+ format!("Failed to run build scripts for {}", cargo.workspace_root())
})
}
ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => {
@@ -393,7 +388,7 @@ impl ProjectWorkspace {
workspaces: &[ProjectWorkspace],
config: &CargoConfig,
progress: &dyn Fn(String),
- ) -> Vec<Result<WorkspaceBuildScripts>> {
+ ) -> Vec<anyhow::Result<WorkspaceBuildScripts>> {
if matches!(config.invocation_strategy, InvocationStrategy::PerWorkspace)
|| config.run_build_script_command.is_none()
{
@@ -419,10 +414,7 @@ impl ProjectWorkspace {
ProjectWorkspace::Cargo { cargo, .. } => match outputs {
Ok(outputs) => Ok(outputs.next().unwrap()),
Err(e) => Err(e.clone()).with_context(|| {
- format!(
- "Failed to run build scripts for {}",
- &cargo.workspace_root().display()
- )
+ format!("Failed to run build scripts for {}", cargo.workspace_root())
}),
},
_ => Ok(WorkspaceBuildScripts::default()),
@@ -447,7 +439,7 @@ impl ProjectWorkspace {
}
}
- pub fn find_sysroot_proc_macro_srv(&self) -> Result<AbsPathBuf> {
+ pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
match self {
ProjectWorkspace::Cargo { sysroot: Ok(sysroot), .. }
| ProjectWorkspace::Json { sysroot: Ok(sysroot), .. }
@@ -459,22 +451,22 @@ impl ProjectWorkspace {
.map(|segment| sysroot.root().join(segment).join(&standalone_server_name))
.find(|server_path| std::fs::metadata(server_path).is_ok())
.ok_or_else(|| {
- anyhow::anyhow!(
+ anyhow::format_err!(
"cannot find proc-macro server in sysroot `{}`",
- sysroot.root().display()
+ sysroot.root()
)
})
}
ProjectWorkspace::DetachedFiles { .. } => {
- Err(anyhow::anyhow!("cannot find proc-macro server, no sysroot was found"))
+ Err(anyhow::format_err!("cannot find proc-macro server, no sysroot was found"))
}
- ProjectWorkspace::Cargo { cargo, .. } => Err(anyhow::anyhow!(
+ ProjectWorkspace::Cargo { cargo, .. } => Err(anyhow::format_err!(
"cannot find proc-macro-srv, the workspace `{}` is missing a sysroot",
- cargo.workspace_root().display()
+ cargo.workspace_root()
)),
- ProjectWorkspace::Json { project, .. } => Err(anyhow::anyhow!(
+ ProjectWorkspace::Json { project, .. } => Err(anyhow::format_err!(
"cannot find proc-macro-srv, the workspace `{}` is missing a sysroot",
- project.path().display()
+ project.path()
)),
}
}
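
`ProjectWorkspace::load` is now a thin wrapper that attaches one high-level context message to whatever `load_inner` returns, so every failure surfaces as "Failed to load the project at <manifest>" with the underlying cause chained below it. A reduced sketch of that `anyhow` pattern, using a hypothetical path just for illustration:

```rust
use anyhow::Context;

// Public entry point: delegate and attach context once.
fn load(manifest: &str) -> anyhow::Result<String> {
    load_inner(manifest).with_context(|| format!("Failed to load the project at {manifest}"))
}

// All the actual work (and all the low-level errors) live here.
fn load_inner(manifest: &str) -> anyhow::Result<String> {
    std::fs::read_to_string(manifest)
        .with_context(|| format!("Failed to read json file {manifest}"))
}

fn main() {
    if let Err(err) = load("/nonexistent/rust-project.json") {
        // `{:#}` prints the outer context plus the chained causes.
        eprintln!("{err:#}");
    }
}
```
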
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
index 5b72d5756..5bfac7ee4 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
@@ -34,10 +34,9 @@ serde.workspace = true
rayon = "1.6.1"
num_cpus = "1.15.0"
mimalloc = { version = "0.1.30", default-features = false, optional = true }
-lsp-server = { version = "0.7.0", path = "../../lib/lsp-server" }
+lsp-server.workspace = true
tracing = "0.1.35"
tracing-subscriber = { version = "0.3.16", default-features = false, features = [
- "env-filter",
"registry",
"fmt",
"tracing-log",
@@ -48,12 +47,8 @@ triomphe.workspace = true
nohash-hasher.workspace = true
always-assert = "0.1.2"
-# These dependencies are unused, but we pin them to a version here to restrict them for our transitive dependencies
-# so that we don't pull in duplicates of their dependencies like windows-sys and syn 1 vs 2
-# these would pull in serde 2
-thiserror = "=1.0.39"
-serde_repr = "=0.1.11"
-# these would pull in windows-sys 0.45.0
+# These 3 deps are not used by r-a directly, but we list them here to lock in their versions
+# in our transitive deps to prevent them from pulling in windows-sys 0.45.0
mio = "=0.8.5"
filetime = "=0.2.19"
parking_lot_core = "=0.9.6"
@@ -67,13 +62,13 @@ ide-db.workspace = true
# This should only be used in CLI
ide-ssr.workspace = true
ide.workspace = true
+load-cargo.workspace = true
proc-macro-api.workspace = true
profile.workspace = true
project-model.workspace = true
stdx.workspace = true
syntax.workspace = true
toolchain.workspace = true
-tt.workspace = true
vfs-notify.workspace = true
vfs.workspace = true
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs
index 8caadecd8..1f923f6cf 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs
@@ -8,10 +8,11 @@ use std::{
sync::Arc,
};
-use rust_analyzer::Result;
+use anyhow::Context;
use tracing::{level_filters::LevelFilter, Event, Subscriber};
use tracing_log::NormalizeEvent;
use tracing_subscriber::{
+ filter::Targets,
fmt::{
format::Writer, writer::BoxMakeWriter, FmtContext, FormatEvent, FormatFields,
FormattedFields, MakeWriter,
@@ -19,81 +20,62 @@ use tracing_subscriber::{
layer::SubscriberExt,
registry::LookupSpan,
util::SubscriberInitExt,
- EnvFilter, Registry,
+ Registry,
};
use tracing_tree::HierarchicalLayer;
-pub(crate) struct Logger {
- filter: EnvFilter,
- file: Option<File>,
+pub(crate) struct LoggerConfig {
+ pub(crate) log_file: Option<File>,
+ pub(crate) filter: String,
+ pub(crate) chalk_filter: Option<String>,
}
struct MakeWriterStderr;
-impl<'a> MakeWriter<'a> for MakeWriterStderr {
+impl MakeWriter<'_> for MakeWriterStderr {
type Writer = Stderr;
- fn make_writer(&'a self) -> Self::Writer {
+ fn make_writer(&self) -> Self::Writer {
io::stderr()
}
}
-impl Logger {
- pub(crate) fn new(file: Option<File>, filter: Option<&str>) -> Logger {
- let filter = filter.map_or(EnvFilter::default(), EnvFilter::new);
-
- Logger { filter, file }
- }
+impl LoggerConfig {
+ pub(crate) fn init(self) -> anyhow::Result<()> {
+ let mut filter: Targets = self
+ .filter
+ .parse()
+ .with_context(|| format!("invalid log filter: `{}`", self.filter))?;
+
+ let mut chalk_layer = None;
+ if let Some(chalk_filter) = self.chalk_filter {
+ let level: LevelFilter =
+ chalk_filter.parse().with_context(|| "invalid chalk log filter")?;
+ chalk_layer = Some(
+ HierarchicalLayer::default()
+ .with_indent_lines(true)
+ .with_ansi(false)
+ .with_indent_amount(2)
+ .with_writer(io::stderr),
+ );
+ filter = filter
+ .with_target("chalk_solve", level)
+ .with_target("chalk_ir", level)
+ .with_target("chalk_recursive", level);
+ };
- pub(crate) fn install(self) -> Result<()> {
- // The meaning of CHALK_DEBUG I suspected is to tell chalk crates
- // (i.e. chalk-solve, chalk-ir, chalk-recursive) how to filter tracing
- // logs. But now we can only have just one filter, which means we have to
- // merge chalk filter to our main filter (from RA_LOG env).
- //
- // The acceptable syntax of CHALK_DEBUG is `target[span{field=value}]=level`.
- // As the value should only affect chalk crates, we'd better manually
- // specify the target. And for simplicity, CHALK_DEBUG only accept the value
- // that specify level.
- let chalk_level_dir = std::env::var("CHALK_DEBUG")
- .map(|val| {
- val.parse::<LevelFilter>().expect(
- "invalid CHALK_DEBUG value, expect right log level (like debug or trace)",
- )
- })
- .ok();
-
- let chalk_layer = HierarchicalLayer::default()
- .with_indent_lines(true)
- .with_ansi(false)
- .with_indent_amount(2)
- .with_writer(io::stderr);
-
- let writer = match self.file {
+ let writer = match self.log_file {
Some(file) => BoxMakeWriter::new(Arc::new(file)),
None => BoxMakeWriter::new(io::stderr),
};
let ra_fmt_layer =
tracing_subscriber::fmt::layer().event_format(LoggerFormatter).with_writer(writer);
- match chalk_level_dir {
- Some(val) => {
- Registry::default()
- .with(
- self.filter
- .add_directive(format!("chalk_solve={val}").parse()?)
- .add_directive(format!("chalk_ir={val}").parse()?)
- .add_directive(format!("chalk_recursive={val}").parse()?),
- )
- .with(ra_fmt_layer)
- .with(chalk_layer)
- .init();
- }
- None => {
- Registry::default().with(self.filter).with(ra_fmt_layer).init();
- }
- };
-
+ let registry = Registry::default().with(filter).with(ra_fmt_layer);
+ match chalk_layer {
+ Some(chalk_layer) => registry.with(chalk_layer).init(),
+ None => registry.init(),
+ }
Ok(())
}
}
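
The new `LoggerConfig::init` builds its filter from a plain string with `tracing_subscriber::filter::Targets` instead of `EnvFilter`, and only raises the chalk targets when `CHALK_DEBUG` is set. A reduced sketch of the filter setup, assuming only `tracing-subscriber` (with the `registry` and `fmt` features kept in the Cargo.toml change above) plus `anyhow`:

```rust
use tracing_subscriber::{
    filter::{LevelFilter, Targets},
    layer::SubscriberExt,
    util::SubscriberInitExt,
    Registry,
};

fn init_logging(filter: &str, chalk_level: Option<LevelFilter>) -> anyhow::Result<()> {
    // Parse the RA_LOG-style filter string, e.g. "error" or "rust_analyzer=debug".
    let mut targets: Targets = filter.parse()?;

    // CHALK_DEBUG only carries a level; the targets are fixed to the chalk crates.
    if let Some(level) = chalk_level {
        targets = targets
            .with_target("chalk_solve", level)
            .with_target("chalk_ir", level)
            .with_target("chalk_recursive", level);
    }

    Registry::default()
        .with(targets)
        .with(tracing_subscriber::fmt::layer())
        .init();
    Ok(())
}

fn main() -> anyhow::Result<()> {
    init_logging("error", None)
}
```
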
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
index 91911dd18..2fa14fc7e 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
@@ -7,14 +7,11 @@
mod logger;
mod rustc_wrapper;
-use std::{
- env, fs,
- path::{Path, PathBuf},
- process,
-};
+use std::{env, fs, path::PathBuf, process};
+use anyhow::Context;
use lsp_server::Connection;
-use rust_analyzer::{cli::flags, config::Config, from_json, Result};
+use rust_analyzer::{cli::flags, config::Config, from_json};
use vfs::AbsPathBuf;
#[cfg(all(feature = "mimalloc"))]
@@ -25,7 +22,7 @@ static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
-fn main() {
+fn main() -> anyhow::Result<()> {
if std::env::var("RA_RUSTC_WRAPPER").is_ok() {
let mut args = std::env::args_os();
let _me = args.next().unwrap();
@@ -41,14 +38,7 @@ fn main() {
}
let flags = flags::RustAnalyzer::from_env_or_exit();
- if let Err(err) = try_main(flags) {
- tracing::error!("Unexpected error: {}", err);
- eprintln!("{err}");
- process::exit(101);
- }
-}
-fn try_main(flags: flags::RustAnalyzer) -> Result<()> {
#[cfg(debug_assertions)]
if flags.wait_dbg || env::var("RA_WAIT_DBG").is_ok() {
#[allow(unused_mut)]
@@ -58,14 +48,8 @@ fn try_main(flags: flags::RustAnalyzer) -> Result<()> {
}
}
- let mut log_file = flags.log_file.as_deref();
-
- let env_log_file = env::var("RA_LOG_FILE").ok();
- if let Some(env_log_file) = env_log_file.as_deref() {
- log_file = Some(Path::new(env_log_file));
- }
+ setup_logging(flags.log_file.clone())?;
- setup_logging(log_file)?;
let verbosity = flags.verbosity();
match flags.subcommand {
@@ -98,11 +82,12 @@ fn try_main(flags: flags::RustAnalyzer) -> Result<()> {
flags::RustAnalyzerCmd::Search(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?,
}
Ok(())
}
-fn setup_logging(log_file: Option<&Path>) -> Result<()> {
+fn setup_logging(log_file_flag: Option<PathBuf>) -> anyhow::Result<()> {
if cfg!(windows) {
// This is required so that windows finds our pdb that is placed right beside the exe.
// By default it doesn't look at the folder the exe resides in, only in the current working
@@ -115,23 +100,42 @@ fn setup_logging(log_file: Option<&Path>) -> Result<()> {
}
}
}
+
if env::var("RUST_BACKTRACE").is_err() {
env::set_var("RUST_BACKTRACE", "short");
}
+ let log_file = env::var("RA_LOG_FILE").ok().map(PathBuf::from).or(log_file_flag);
let log_file = match log_file {
Some(path) => {
if let Some(parent) = path.parent() {
let _ = fs::create_dir_all(parent);
}
- Some(fs::File::create(path)?)
+ Some(
+ fs::File::create(&path)
+ .with_context(|| format!("can't create log file at {}", path.display()))?,
+ )
}
None => None,
};
- let filter = env::var("RA_LOG").ok();
- // deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually useful
- // information in there for debugging
- logger::Logger::new(log_file, filter.as_deref().or(Some("error"))).install()?;
+
+ logger::LoggerConfig {
+ log_file,
+ // Deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually
+ // useful information in there for debugging.
+ filter: env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_string()),
+ // CHALK_DEBUG is presumably meant to tell the chalk crates
+ // (i.e. chalk-solve, chalk-ir, chalk-recursive) how to filter tracing
+ // logs. But since we can only have one filter, we have to merge the
+ // chalk filter into our main filter (from the RA_LOG env var).
+ //
+ // The general tracing filter syntax is `target[span{field=value}]=level`.
+ // As the value should only affect the chalk crates, we specify the
+ // targets manually, and for simplicity CHALK_DEBUG only accepts a value
+ // that specifies the level.
+ chalk_filter: env::var("CHALK_DEBUG").ok(),
+ }
+ .init()?;
profile::init();
@@ -146,8 +150,8 @@ const STACK_SIZE: usize = 1024 * 1024 * 8;
fn with_extra_thread(
thread_name: impl Into<String>,
thread_intent: stdx::thread::ThreadIntent,
- f: impl FnOnce() -> Result<()> + Send + 'static,
-) -> Result<()> {
+ f: impl FnOnce() -> anyhow::Result<()> + Send + 'static,
+) -> anyhow::Result<()> {
let handle = stdx::thread::Builder::new(thread_intent)
.name(thread_name.into())
.stack_size(STACK_SIZE)
@@ -158,7 +162,7 @@ fn with_extra_thread(
Ok(())
}
-fn run_server() -> Result<()> {
+fn run_server() -> anyhow::Result<()> {
tracing::info!("server version {} will start", rust_analyzer::version());
let (connection, io_threads) = Connection::stdio();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
index e35201921..64646b33a 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
@@ -1,7 +1,6 @@
//! Various batch processing tasks, intended primarily for debugging.
pub mod flags;
-pub mod load_cargo;
mod parse;
mod symbols;
mod highlight;
@@ -10,13 +9,17 @@ mod diagnostics;
mod ssr;
mod lsif;
mod scip;
+mod run_tests;
mod progress_report;
use std::io::Read;
use anyhow::Result;
+use hir::{Module, Name};
+use hir_ty::db::HirDatabase;
use ide::AnalysisHost;
+use itertools::Itertools;
use vfs::Vfs;
#[derive(Clone, Copy)]
@@ -36,7 +39,7 @@ impl Verbosity {
}
}
-fn read_stdin() -> Result<String> {
+fn read_stdin() -> anyhow::Result<String> {
let mut buff = String::new();
std::io::stdin().read_to_string(&mut buff)?;
Ok(buff)
@@ -71,3 +74,14 @@ fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) {
eprintln!("{remaining:>8} Remaining");
}
+
+fn full_name_of_item(db: &dyn HirDatabase, module: Module, name: Name) -> String {
+ module
+ .path_to_root(db)
+ .into_iter()
+ .rev()
+ .filter_map(|it| it.name(db))
+ .chain(Some(name))
+ .map(|it| it.display(db.upcast()).to_string())
+ .join("::")
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
index 4cb917ce2..f446a7c05 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -8,14 +8,14 @@ use std::{
use hir::{
db::{DefDatabase, ExpandDatabase, HirDatabase},
- Adt, AssocItem, Crate, DefWithBody, HasCrate, HasSource, HirDisplay, ModuleDef, Name,
+ Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, ModuleDef, Name,
};
use hir_def::{
body::{BodySourceMap, SyntheticSyntax},
hir::{ExprId, PatId},
};
use hir_ty::{Interner, Substitution, TyExt, TypeFlags};
-use ide::{LineCol, RootDatabase};
+use ide::{Analysis, AnnotationConfig, DiagnosticsConfig, InlayHintsConfig, LineCol, RootDatabase};
use ide_db::{
base_db::{
salsa::{self, debug::DebugQueryTable, ParallelDatabase},
@@ -24,20 +24,20 @@ use ide_db::{
LineIndexDatabase,
};
use itertools::Itertools;
+use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
use oorandom::Rand32;
use profile::{Bytes, StopWatch};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use syntax::{AstNode, SyntaxNode};
-use vfs::{AbsPathBuf, Vfs, VfsPath};
+use vfs::{AbsPathBuf, FileId, Vfs, VfsPath};
use crate::cli::{
flags::{self, OutputFormat},
- load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice},
- print_memory_usage,
+ full_name_of_item, print_memory_usage,
progress_report::ProgressReport,
- report_metric, Result, Verbosity,
+ report_metric, Verbosity,
};
/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
@@ -49,7 +49,7 @@ impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
}
impl flags::AnalysisStats {
- pub fn run(self, verbosity: Verbosity) -> Result<()> {
+ pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> {
let mut rng = {
let seed = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64;
Rand32::new(seed)
@@ -95,17 +95,41 @@ impl flags::AnalysisStats {
eprintln!(")");
let mut analysis_sw = self.stop_watch();
- let mut num_crates = 0;
- let mut visited_modules = FxHashSet::default();
- let mut visit_queue = Vec::new();
let mut krates = Crate::all(db);
if self.randomize {
shuffle(&mut rng, &mut krates);
}
+
+ let mut item_tree_sw = self.stop_watch();
+ let mut num_item_trees = 0;
+ let source_roots =
+ krates.iter().cloned().map(|krate| db.file_source_root(krate.root_file(db))).unique();
+ for source_root_id in source_roots {
+ let source_root = db.source_root(source_root_id);
+ if !source_root.is_library || self.with_deps {
+ for file_id in source_root.iter() {
+ if let Some(p) = source_root.path_for_file(&file_id) {
+ if let Some((_, Some("rs"))) = p.name_and_extension() {
+ db.file_item_tree(file_id.into());
+ num_item_trees += 1;
+ }
+ }
+ }
+ }
+ }
+ eprintln!(" item trees: {num_item_trees}");
+ let item_tree_time = item_tree_sw.elapsed();
+ eprintln!("{:<20} {}", "Item Tree Collection:", item_tree_time);
+ report_metric("item tree time", item_tree_time.time.as_millis() as u64, "ms");
+
+ let mut crate_def_map_sw = self.stop_watch();
+ let mut num_crates = 0;
+ let mut visited_modules = FxHashSet::default();
+ let mut visit_queue = Vec::new();
for krate in krates {
- let module = krate.root_module(db);
- let file_id = module.definition_source(db).file_id;
+ let module = krate.root_module();
+ let file_id = module.definition_source_file_id(db);
let file_id = file_id.original_file(db);
let source_root = db.file_source_root(file_id);
let source_root = db.source_root(source_root);
@@ -124,8 +148,10 @@ impl flags::AnalysisStats {
let mut bodies = Vec::new();
let mut adts = Vec::new();
let mut consts = Vec::new();
+ let mut file_ids = Vec::new();
while let Some(module) = visit_queue.pop() {
if visited_modules.insert(module) {
+ file_ids.extend(module.as_source_file_id(db));
visit_queue.extend(module.children(db));
for decl in module.declarations(db) {
@@ -171,7 +197,9 @@ impl flags::AnalysisStats {
adts.len(),
consts.len(),
);
- eprintln!("{:<20} {}", "Item Collection:", analysis_sw.elapsed());
+ let crate_def_map_time = crate_def_map_sw.elapsed();
+ eprintln!("{:<20} {}", "Item Collection:", crate_def_map_time);
+ report_metric("crate def map time", crate_def_map_time.time.as_millis() as u64, "ms");
if self.randomize {
shuffle(&mut rng, &mut bodies);
@@ -197,6 +225,10 @@ impl flags::AnalysisStats {
self.run_const_eval(db, &consts, verbosity);
}
+ if self.run_all_ide_things {
+ self.run_ide_things(host.analysis(), file_ids);
+ }
+
let total_span = analysis_sw.elapsed();
eprintln!("{:<20} {total_span}", "Total:");
report_metric("total time", total_span.time.as_millis() as u64, "ms");
@@ -242,21 +274,15 @@ impl flags::AnalysisStats {
continue;
}
all += 1;
- let Err(e)
- = db.layout_of_adt(hir_def::AdtId::from(a).into(), Substitution::empty(Interner), a.krate(db).into())
- else {
- continue
+ let Err(e) = db.layout_of_adt(
+ hir_def::AdtId::from(a).into(),
+ Substitution::empty(Interner),
+ db.trait_environment(a.into()),
+ ) else {
+ continue;
};
if verbosity.is_spammy() {
- let full_name = a
- .module(db)
- .path_to_root(db)
- .into_iter()
- .rev()
- .filter_map(|it| it.name(db))
- .chain(Some(a.name(db)))
- .map(|it| it.display(db).to_string())
- .join("::");
+ let full_name = full_name_of_item(db, a.module(db), a.name(db));
println!("Data layout for {full_name} failed due {e:?}");
}
fail += 1;
@@ -278,15 +304,8 @@ impl flags::AnalysisStats {
continue;
};
if verbosity.is_spammy() {
- let full_name = c
- .module(db)
- .path_to_root(db)
- .into_iter()
- .rev()
- .filter_map(|it| it.name(db))
- .chain(c.name(db))
- .map(|it| it.display(db).to_string())
- .join("::");
+ let full_name =
+ full_name_of_item(db, c.module(db), c.name(db).unwrap_or(Name::missing()));
println!("Const eval for {full_name} failed due {e:?}");
}
fail += 1;
@@ -717,6 +736,83 @@ impl flags::AnalysisStats {
report_metric("body lowering time", body_lowering_time.time.as_millis() as u64, "ms");
}
+ fn run_ide_things(&self, analysis: Analysis, mut file_ids: Vec<FileId>) {
+ file_ids.sort();
+ file_ids.dedup();
+ let mut sw = self.stop_watch();
+
+ for &file_id in &file_ids {
+ _ = analysis.diagnostics(
+ &DiagnosticsConfig {
+ enabled: true,
+ proc_macros_enabled: true,
+ proc_attr_macros_enabled: true,
+ disable_experimental: false,
+ disabled: Default::default(),
+ expr_fill_default: Default::default(),
+ insert_use: ide_db::imports::insert_use::InsertUseConfig {
+ granularity: ide_db::imports::insert_use::ImportGranularity::Crate,
+ enforce_granularity: true,
+ prefix_kind: hir::PrefixKind::ByCrate,
+ group: true,
+ skip_glob_imports: true,
+ },
+ prefer_no_std: Default::default(),
+ },
+ ide::AssistResolveStrategy::All,
+ file_id,
+ );
+ }
+ for &file_id in &file_ids {
+ _ = analysis.inlay_hints(
+ &InlayHintsConfig {
+ render_colons: false,
+ type_hints: true,
+ discriminant_hints: ide::DiscriminantHints::Always,
+ parameter_hints: true,
+ chaining_hints: true,
+ adjustment_hints: ide::AdjustmentHints::Always,
+ adjustment_hints_mode: ide::AdjustmentHintsMode::Postfix,
+ adjustment_hints_hide_outside_unsafe: false,
+ closure_return_type_hints: ide::ClosureReturnTypeHints::Always,
+ closure_capture_hints: true,
+ binding_mode_hints: true,
+ lifetime_elision_hints: ide::LifetimeElisionHints::Always,
+ param_names_for_lifetime_elision_hints: true,
+ hide_named_constructor_hints: false,
+ hide_closure_initialization_hints: false,
+ closure_style: hir::ClosureStyle::ImplFn,
+ max_length: Some(25),
+ closing_brace_hints_min_lines: Some(20),
+ },
+ file_id,
+ None,
+ );
+ }
+ for &file_id in &file_ids {
+ analysis
+ .annotations(
+ &AnnotationConfig {
+ binary_target: true,
+ annotate_runnables: true,
+ annotate_impls: true,
+ annotate_references: false,
+ annotate_method_references: false,
+ annotate_enum_variant_references: false,
+ location: ide::AnnotationLocation::AboveName,
+ },
+ file_id,
+ )
+ .unwrap()
+ .into_iter()
+ .for_each(|annotation| {
+ _ = analysis.resolve_annotation(annotation);
+ });
+ }
+ let ide_time = sw.elapsed();
+ eprintln!("{:<20} {} ({} files)", "IDE:", ide_time, file_ids.len());
+ }
+
fn stop_watch(&self) -> StopWatch {
StopWatch::start().memory(self.memory_usage)
}
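
The spammy-output branches above now delegate to a `full_name_of_item` helper imported from the cli module; its definition sits outside the hunks shown here. A minimal sketch, assuming it simply factors out the inline path-building that was removed (the real signature and location may differ):

use hir::{Module, Name};
use ide::RootDatabase;
use itertools::Itertools;

// Joins the names of the modules from the crate root down to the item with `::`,
// mirroring the inline path-building deleted in the hunks above.
fn full_name_of_item(db: &RootDatabase, module: Module, name: Name) -> String {
    module
        .path_to_root(db)
        .into_iter()
        .rev()
        .filter_map(|it| it.name(db))
        .chain(Some(name))
        .map(|it| it.display(db).to_string())
        .join("::")
}
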
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
index 4306d7212..8541be715 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -7,11 +7,9 @@ use rustc_hash::FxHashSet;
use hir::{db::HirDatabase, Crate, Module};
use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity};
use ide_db::base_db::SourceDatabaseExt;
+use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
-use crate::cli::{
- flags,
- load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice},
-};
+use crate::cli::flags;
impl flags::Diagnostics {
pub fn run(self) -> anyhow::Result<()> {
@@ -37,14 +35,14 @@ impl flags::Diagnostics {
let mut visited_files = FxHashSet::default();
let work = all_modules(db).into_iter().filter(|module| {
- let file_id = module.definition_source(db).file_id.original_file(db);
+ let file_id = module.definition_source_file_id(db).original_file(db);
let source_root = db.file_source_root(file_id);
let source_root = db.source_root(source_root);
!source_root.is_library
});
for module in work {
- let file_id = module.definition_source(db).file_id.original_file(db);
+ let file_id = module.definition_source_file_id(db).original_file(db);
if !visited_files.contains(&file_id) {
let crate_name =
module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string();
@@ -82,7 +80,7 @@ impl flags::Diagnostics {
fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
let mut worklist: Vec<_> =
- Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
+ Crate::all(db).into_iter().map(|krate| krate.root_module()).collect();
let mut modules = Vec::new();
while let Some(module) = worklist.pop() {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
index 208a4e6ec..13b7f039b 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
@@ -12,7 +12,7 @@ xflags::xflags! {
/// LSP server for the Rust programming language.
///
/// Subcommands and their flags do not provide any stability guarantees and may be removed or
- /// changed without notice. Top-level flags that are not are marked as [Unstable] provide
+ /// changed without notice. Top-level flags that are not marked as [Unstable] provide
/// backwards-compatibility and may be relied on.
cmd rust-analyzer {
/// Verbosity level, can be repeated multiple times.
@@ -88,6 +88,16 @@ xflags::xflags! {
optional --skip-data-layout
/// Skip const evaluation
optional --skip-const-eval
+ /// Runs several IDE features after analysis, including semantic highlighting, diagnostics
+ /// and annotations. This is useful for benchmarking memory usage on a project that has
+ /// been worked on for a while in a longer-running session.
+ optional --run-all-ide-things
+ }
+
+ /// Run unit tests of the project using the mir interpreter
+ cmd run-tests {
+ /// Directory with Cargo.toml.
+ required path: PathBuf
}
cmd diagnostics {
@@ -103,7 +113,7 @@ xflags::xflags! {
}
cmd ssr {
- /// A structured search replace rule (`$a.foo($b) ==> bar($a, $b)`)
+ /// A structured search replace rule (`$a.foo($b) ==>> bar($a, $b)`)
repeated rule: SsrRule
}
@@ -147,6 +157,7 @@ pub enum RustAnalyzerCmd {
Symbols(Symbols),
Highlight(Highlight),
AnalysisStats(AnalysisStats),
+ RunTests(RunTests),
Diagnostics(Diagnostics),
Ssr(Ssr),
Search(Search),
@@ -182,16 +193,22 @@ pub struct AnalysisStats {
pub parallel: bool,
pub memory_usage: bool,
pub source_stats: bool,
- pub skip_lowering: bool,
- pub skip_inference: bool,
- pub skip_mir_stats: bool,
- pub skip_data_layout: bool,
- pub skip_const_eval: bool,
pub only: Option<String>,
pub with_deps: bool,
pub no_sysroot: bool,
pub disable_build_scripts: bool,
pub disable_proc_macros: bool,
+ pub skip_lowering: bool,
+ pub skip_inference: bool,
+ pub skip_mir_stats: bool,
+ pub skip_data_layout: bool,
+ pub skip_const_eval: bool,
+ pub run_all_ide_things: bool,
+}
+
+#[derive(Debug)]
+pub struct RunTests {
+ pub path: PathBuf,
}
#[derive(Debug)]
@@ -223,6 +240,7 @@ pub struct Lsif {
#[derive(Debug)]
pub struct Scip {
pub path: PathBuf,
+
pub output: Option<PathBuf>,
}
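
The `--run-all-ide-things` switch declared above is wired up in analysis_stats.rs earlier in this patch. A hedged sketch of driving it end to end from another program, assuming a `rust-analyzer` binary built from this tree is on PATH and using a placeholder project path:

use std::process::Command;

fn main() -> std::io::Result<()> {
    // Placeholder path; point this at a real Cargo workspace.
    let status = Command::new("rust-analyzer")
        .args(["analysis-stats", "--run-all-ide-things", "path/to/project"])
        .status()?;
    assert!(status.success());
    Ok(())
}
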
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs
deleted file mode 100644
index 4e8f99971..000000000
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs
+++ /dev/null
@@ -1,205 +0,0 @@
-//! Loads a Cargo project into a static instance of analysis, without support
-//! for incorporating changes.
-use std::path::Path;
-
-use anyhow::{anyhow, Result};
-use crossbeam_channel::{unbounded, Receiver};
-use ide::{AnalysisHost, Change};
-use ide_db::{
- base_db::{CrateGraph, ProcMacros},
- FxHashMap,
-};
-use proc_macro_api::ProcMacroServer;
-use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
-use triomphe::Arc;
-use vfs::{loader::Handle, AbsPath, AbsPathBuf};
-
-use crate::reload::{load_proc_macro, ProjectFolders, SourceRootConfig};
-
-// Note: Since this type is used by external tools that use rust-analyzer as a library
-// what otherwise would be `pub(crate)` has to be `pub` here instead.
-pub struct LoadCargoConfig {
- pub load_out_dirs_from_check: bool,
- pub with_proc_macro_server: ProcMacroServerChoice,
- pub prefill_caches: bool,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum ProcMacroServerChoice {
- Sysroot,
- Explicit(AbsPathBuf),
- None,
-}
-
-// Note: Since this function is used by external tools that use rust-analyzer as a library
-// what otherwise would be `pub(crate)` has to be `pub` here instead.
-pub fn load_workspace_at(
- root: &Path,
- cargo_config: &CargoConfig,
- load_config: &LoadCargoConfig,
- progress: &dyn Fn(String),
-) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
- let root = AbsPathBuf::assert(std::env::current_dir()?.join(root));
- let root = ProjectManifest::discover_single(&root)?;
- let mut workspace = ProjectWorkspace::load(root, cargo_config, progress)?;
-
- if load_config.load_out_dirs_from_check {
- let build_scripts = workspace.run_build_scripts(cargo_config, progress)?;
- workspace.set_build_scripts(build_scripts)
- }
-
- load_workspace(workspace, &cargo_config.extra_env, load_config)
-}
-
-// Note: Since this function is used by external tools that use rust-analyzer as a library
-// what otherwise would be `pub(crate)` has to be `pub` here instead.
-//
-// The reason both, `load_workspace_at` and `load_workspace` are `pub` is that some of
-// these tools need access to `ProjectWorkspace`, too, which `load_workspace_at` hides.
-pub fn load_workspace(
- ws: ProjectWorkspace,
- extra_env: &FxHashMap<String, String>,
- load_config: &LoadCargoConfig,
-) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
- let (sender, receiver) = unbounded();
- let mut vfs = vfs::Vfs::default();
- let mut loader = {
- let loader =
- vfs_notify::NotifyHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
- Box::new(loader)
- };
-
- let proc_macro_server = match &load_config.with_proc_macro_server {
- ProcMacroServerChoice::Sysroot => ws
- .find_sysroot_proc_macro_srv()
- .and_then(|it| ProcMacroServer::spawn(it).map_err(Into::into)),
- ProcMacroServerChoice::Explicit(path) => {
- ProcMacroServer::spawn(path.clone()).map_err(Into::into)
- }
- ProcMacroServerChoice::None => Err(anyhow!("proc macro server disabled")),
- };
-
- let (crate_graph, proc_macros) = ws.to_crate_graph(
- &mut |path: &AbsPath| {
- let contents = loader.load_sync(path);
- let path = vfs::VfsPath::from(path.to_path_buf());
- vfs.set_file_contents(path.clone(), contents);
- vfs.file_id(&path)
- },
- extra_env,
- );
- let proc_macros = {
- let proc_macro_server = match &proc_macro_server {
- Ok(it) => Ok(it),
- Err(e) => Err(e.to_string()),
- };
- proc_macros
- .into_iter()
- .map(|(crate_id, path)| {
- (
- crate_id,
- path.map_or_else(
- |_| Err("proc macro crate is missing dylib".to_owned()),
- |(_, path)| {
- proc_macro_server.as_ref().map_err(Clone::clone).and_then(
- |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
- )
- },
- ),
- )
- })
- .collect()
- };
-
- let project_folders = ProjectFolders::new(&[ws], &[]);
- loader.set_config(vfs::loader::Config {
- load: project_folders.load,
- watch: vec![],
- version: 0,
- });
-
- tracing::debug!("crate graph: {:?}", crate_graph);
- let host = load_crate_graph(
- crate_graph,
- proc_macros,
- project_folders.source_root_config,
- &mut vfs,
- &receiver,
- );
-
- if load_config.prefill_caches {
- host.analysis().parallel_prime_caches(1, |_| {})?;
- }
- Ok((host, vfs, proc_macro_server.ok()))
-}
-
-fn load_crate_graph(
- crate_graph: CrateGraph,
- proc_macros: ProcMacros,
- source_root_config: SourceRootConfig,
- vfs: &mut vfs::Vfs,
- receiver: &Receiver<vfs::loader::Message>,
-) -> AnalysisHost {
- let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
- let mut host = AnalysisHost::new(lru_cap);
- let mut analysis_change = Change::new();
-
- host.raw_database_mut().enable_proc_attr_macros();
-
- // wait until Vfs has loaded all roots
- for task in receiver {
- match task {
- vfs::loader::Message::Progress { n_done, n_total, config_version: _ } => {
- if n_done == n_total {
- break;
- }
- }
- vfs::loader::Message::Loaded { files } => {
- for (path, contents) in files {
- vfs.set_file_contents(path.into(), contents);
- }
- }
- }
- }
- let changes = vfs.take_changes();
- for file in changes {
- if file.exists() {
- let contents = vfs.file_contents(file.file_id);
- if let Ok(text) = std::str::from_utf8(contents) {
- analysis_change.change_file(file.file_id, Some(Arc::from(text)))
- }
- }
- }
- let source_roots = source_root_config.partition(vfs);
- analysis_change.set_roots(source_roots);
-
- analysis_change.set_crate_graph(crate_graph);
- analysis_change.set_proc_macros(proc_macros);
-
- host.apply_change(analysis_change);
- host
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- use hir::Crate;
-
- #[test]
- fn test_loading_rust_analyzer() {
- let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
- let cargo_config = CargoConfig::default();
- let load_cargo_config = LoadCargoConfig {
- load_out_dirs_from_check: false,
- with_proc_macro_server: ProcMacroServerChoice::None,
- prefill_caches: false,
- };
- let (host, _vfs, _proc_macro) =
- load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap();
-
- let n_crates = Crate::all(host.raw_database()).len();
- // RA has quite a few crates, but the exact count doesn't matter
- assert!(n_crates > 20);
- }
-}
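
With this file deleted, the same API now lives in a standalone `load-cargo` crate; the `use load_cargo::{...}` imports added in the neighbouring files point there. A minimal sketch of a downstream tool loading a workspace through it, mirroring the signature of the removed `load_workspace_at` (config values are placeholders, and the new crate is assumed to re-export the same items):

use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
use project_model::CargoConfig;

fn load(project_root: &std::path::Path) -> anyhow::Result<()> {
    let cargo_config = CargoConfig::default();
    let load_config = LoadCargoConfig {
        load_out_dirs_from_check: false,
        with_proc_macro_server: ProcMacroServerChoice::None,
        prefill_caches: false,
    };
    // Same return shape as the removed function: analysis host, VFS, optional proc-macro server.
    let (host, _vfs, _proc_macro_server) =
        load_workspace_at(project_root, &cargo_config, &load_config, &|_| {})?;
    let _analysis = host.analysis();
    Ok(())
}
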
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
index 7f5d08449..42d180114 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
@@ -8,23 +8,22 @@ use ide::{
Analysis, FileId, FileRange, MonikerKind, PackageInformation, RootDatabase, StaticIndex,
StaticIndexedFile, TokenId, TokenStaticData,
};
-use ide_db::LineIndexDatabase;
-
-use ide_db::base_db::salsa::{self, ParallelDatabase};
-use ide_db::line_index::WideEncoding;
+use ide_db::{
+ base_db::salsa::{self, ParallelDatabase},
+ line_index::WideEncoding,
+ LineIndexDatabase,
+};
+use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
use lsp_types::{self, lsif};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use vfs::{AbsPathBuf, Vfs};
-use crate::cli::load_cargo::ProcMacroServerChoice;
-use crate::cli::{
- flags,
- load_cargo::{load_workspace, LoadCargoConfig},
- Result,
+use crate::{
+ cli::flags,
+ line_index::{LineEndings, LineIndex, PositionEncoding},
+ to_proto,
+ version::version,
};
-use crate::line_index::{LineEndings, LineIndex, PositionEncoding};
-use crate::to_proto;
-use crate::version::version;
/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
struct Snap<DB>(DB);
@@ -49,8 +48,8 @@ struct LsifManager<'a> {
struct Id(i32);
impl From<Id> for lsp_types::NumberOrString {
- fn from(Id(x): Id) -> Self {
- lsp_types::NumberOrString::Number(x)
+ fn from(Id(it): Id) -> Self {
+ lsp_types::NumberOrString::Number(it)
}
}
@@ -89,8 +88,8 @@ impl LsifManager<'_> {
}
fn get_token_id(&mut self, id: TokenId) -> Id {
- if let Some(x) = self.token_map.get(&id) {
- return *x;
+ if let Some(it) = self.token_map.get(&id) {
+ return *it;
}
let result_set_id = self.add_vertex(lsif::Vertex::ResultSet(lsif::ResultSet { key: None }));
self.token_map.insert(id, result_set_id);
@@ -98,8 +97,8 @@ impl LsifManager<'_> {
}
fn get_package_id(&mut self, package_information: PackageInformation) -> Id {
- if let Some(x) = self.package_map.get(&package_information) {
- return *x;
+ if let Some(it) = self.package_map.get(&package_information) {
+ return *it;
}
let pi = package_information.clone();
let result_set_id =
@@ -120,8 +119,8 @@ impl LsifManager<'_> {
}
fn get_range_id(&mut self, id: FileRange) -> Id {
- if let Some(x) = self.range_map.get(&id) {
- return *x;
+ if let Some(it) = self.range_map.get(&id) {
+ return *it;
}
let file_id = id.file_id;
let doc_id = self.get_file_id(file_id);
@@ -143,8 +142,8 @@ impl LsifManager<'_> {
}
fn get_file_id(&mut self, id: FileId) -> Id {
- if let Some(x) = self.file_map.get(&id) {
- return *x;
+ if let Some(it) = self.file_map.get(&id) {
+ return *it;
}
let path = self.vfs.file_path(id);
let path = path.as_path().unwrap();
@@ -217,18 +216,18 @@ impl LsifManager<'_> {
}));
let mut edges = token.references.iter().fold(
HashMap::<_, Vec<lsp_types::NumberOrString>>::new(),
- |mut edges, x| {
+ |mut edges, it| {
let entry =
- edges.entry((x.range.file_id, x.is_definition)).or_insert_with(Vec::new);
- entry.push((*self.range_map.get(&x.range).unwrap()).into());
+ edges.entry((it.range.file_id, it.is_definition)).or_insert_with(Vec::new);
+ entry.push((*self.range_map.get(&it.range).unwrap()).into());
edges
},
);
- for x in token.references {
- if let Some(vertices) = edges.remove(&(x.range.file_id, x.is_definition)) {
+ for it in token.references {
+ if let Some(vertices) = edges.remove(&(it.range.file_id, it.is_definition)) {
self.add_edge(lsif::Edge::Item(lsif::Item {
- document: (*self.file_map.get(&x.range.file_id).unwrap()).into(),
- property: Some(if x.is_definition {
+ document: (*self.file_map.get(&it.range.file_id).unwrap()).into(),
+ property: Some(if it.is_definition {
lsif::ItemKind::Definitions
} else {
lsif::ItemKind::References
@@ -286,7 +285,7 @@ impl LsifManager<'_> {
}
impl flags::Lsif {
- pub fn run(self) -> Result<()> {
+ pub fn run(self) -> anyhow::Result<()> {
eprintln!("Generating LSIF started...");
let now = Instant::now();
let mut cargo_config = CargoConfig::default();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
new file mode 100644
index 000000000..e17041991
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
@@ -0,0 +1,89 @@
+//! Run all tests in a project, similar to `cargo test`, but using the mir interpreter.
+
+use hir::{Crate, Module};
+use hir_ty::db::HirDatabase;
+use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase};
+use profile::StopWatch;
+use project_model::{CargoConfig, RustLibSource};
+use syntax::TextRange;
+
+use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
+
+use crate::cli::{flags, full_name_of_item, Result};
+
+impl flags::RunTests {
+ pub fn run(self) -> Result<()> {
+ let mut cargo_config = CargoConfig::default();
+ cargo_config.sysroot = Some(RustLibSource::Discover);
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: true,
+ with_proc_macro_server: ProcMacroServerChoice::Sysroot,
+ prefill_caches: false,
+ };
+ let (host, _vfs, _proc_macro) =
+ load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
+ let db = host.raw_database();
+
+ let tests = all_modules(db)
+ .into_iter()
+ .flat_map(|x| x.declarations(db))
+ .filter_map(|x| match x {
+ hir::ModuleDef::Function(f) => Some(f),
+ _ => None,
+ })
+ .filter(|x| x.is_test(db));
+ let span_formatter = |file_id, text_range: TextRange| {
+ let line_col = match db.line_index(file_id).try_line_col(text_range.start()) {
+ None => " (unknown line col)".to_string(),
+ Some(x) => format!("#{}:{}", x.line + 1, x.col),
+ };
+ let path = &db
+ .source_root(db.file_source_root(file_id))
+ .path_for_file(&file_id)
+ .map(|x| x.to_string());
+ let path = path.as_deref().unwrap_or("<unknown file>");
+ format!("file://{path}{line_col}")
+ };
+ let mut pass_count = 0;
+ let mut ignore_count = 0;
+ let mut fail_count = 0;
+ let mut sw_all = StopWatch::start();
+ for test in tests {
+ let full_name = full_name_of_item(db, test.module(db), test.name(db));
+ println!("test {}", full_name);
+ if test.is_ignore(db) {
+ println!("ignored");
+ ignore_count += 1;
+ continue;
+ }
+ let mut sw_one = StopWatch::start();
+ let result = test.eval(db, span_formatter);
+ if result.trim() == "pass" {
+ pass_count += 1;
+ } else {
+ fail_count += 1;
+ }
+ println!("{}", result);
+ eprintln!("{:<20} {}", format!("test {}", full_name), sw_one.elapsed());
+ }
+ println!("{pass_count} passed, {fail_count} failed, {ignore_count} ignored");
+ eprintln!("{:<20} {}", "All tests", sw_all.elapsed());
+ Ok(())
+ }
+}
+
+fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
+ let mut worklist: Vec<_> = Crate::all(db)
+ .into_iter()
+ .filter(|x| x.origin(db).is_local())
+ .map(|krate| krate.root_module())
+ .collect();
+ let mut modules = Vec::new();
+
+ while let Some(module) = worklist.pop() {
+ modules.push(module);
+ worklist.extend(module.children(db));
+ }
+
+ modules
+}
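
A matching invocation sketch for the new subcommand itself: per the flag declaration it takes a directory containing Cargo.toml, and per the printlns above it ends with an `N passed, M failed, K ignored` summary. The binary location and project path are placeholders:

use std::process::Command;

fn main() -> std::io::Result<()> {
    // Placeholder path; must contain a Cargo.toml, per `cmd run-tests` in flags.rs.
    let status = Command::new("rust-analyzer")
        .args(["run-tests", "path/to/project"])
        .status()?;
    assert!(status.success());
    Ok(())
}
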
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
index b0b724bdf..44337f955 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
@@ -6,27 +6,23 @@ use std::{
time::Instant,
};
-use crate::{
- cli::load_cargo::ProcMacroServerChoice,
- line_index::{LineEndings, LineIndex, PositionEncoding},
-};
use ide::{
LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId,
TokenStaticData,
};
use ide_db::LineIndexDatabase;
+use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use scip::types as scip_types;
use std::env;
-use crate::cli::{
- flags,
- load_cargo::{load_workspace, LoadCargoConfig},
- Result,
+use crate::{
+ cli::flags,
+ line_index::{LineEndings, LineIndex, PositionEncoding},
};
impl flags::Scip {
- pub fn run(self) -> Result<()> {
+ pub fn run(self) -> anyhow::Result<()> {
eprintln!("Generating SCIP start...");
let now = Instant::now();
let mut cargo_config = CargoConfig::default();
@@ -65,7 +61,7 @@ impl flags::Scip {
path.normalize()
.as_os_str()
.to_str()
- .ok_or(anyhow::anyhow!("Unable to normalize project_root path"))?
+ .ok_or(anyhow::format_err!("Unable to normalize project_root path"))?
),
text_document_encoding: scip_types::TextEncoding::UTF8.into(),
special_fields: Default::default(),
@@ -168,7 +164,7 @@ impl flags::Scip {
let out_path = self.output.unwrap_or_else(|| PathBuf::from(r"index.scip"));
scip::write_message_to_file(out_path, index)
- .map_err(|err| anyhow::anyhow!("Failed to write scip to file: {}", err))?;
+ .map_err(|err| anyhow::format_err!("Failed to write scip to file: {}", err))?;
eprintln!("Generating SCIP finished {:?}", now.elapsed());
Ok(())
@@ -276,7 +272,7 @@ mod test {
let change_fixture = ChangeFixture::parse(ra_fixture);
host.raw_database_mut().apply_change(change_fixture.change);
let (file_id, range_or_offset) =
- change_fixture.file_position.expect("expected a marker ($0)");
+ change_fixture.file_position.expect("expected a marker ()");
let offset = range_or_offset.expect_offset();
(host, FilePosition { file_id, offset })
}
@@ -325,7 +321,7 @@ use foo::example_mod::func;
fn main() {
func$0();
}
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod example_mod {
pub fn func() {}
}
@@ -338,7 +334,7 @@ pub mod example_mod {
fn symbol_for_trait() {
check_symbol(
r#"
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
pub fn func$0() {}
@@ -353,7 +349,7 @@ pub mod module {
fn symbol_for_trait_constant() {
check_symbol(
r#"
- //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
const MY_CONST$0: u8;
@@ -368,7 +364,7 @@ pub mod module {
fn symbol_for_trait_type() {
check_symbol(
r#"
- //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
type MyType$0;
@@ -384,7 +380,7 @@ pub mod module {
fn symbol_for_trait_impl_function() {
check_symbol(
r#"
- //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
pub fn func() {}
@@ -411,7 +407,7 @@ pub mod module {
fn main() {
let x = St { a$0: 2 };
}
- //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub struct St {
pub a: i32,
}
@@ -421,6 +417,44 @@ pub mod module {
}
#[test]
+ fn symbol_for_param() {
+ check_symbol(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::example_mod::func;
+fn main() {
+ func(42);
+}
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
+pub mod example_mod {
+ pub fn func(x$0: usize) {}
+}
+"#,
+ "rust-analyzer cargo foo 0.1.0 example_mod/func().(x)",
+ );
+ }
+
+ #[test]
+ fn symbol_for_closure_param() {
+ check_symbol(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::example_mod::func;
+fn main() {
+ func();
+}
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
+pub mod example_mod {
+ pub fn func() {
+ let f = |x$0: usize| {};
+ }
+}
+"#,
+ "rust-analyzer cargo foo 0.1.0 example_mod/func().(x)",
+ );
+ }
+
+ #[test]
fn local_symbol_for_local() {
check_symbol(
r#"
@@ -429,7 +463,7 @@ pub mod module {
fn main() {
func();
}
- //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub fn func() {
let x$0 = 2;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
index 82a769347..f87dcb889 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
@@ -1,16 +1,14 @@
//! Applies structured search replace rules from the command line.
+use anyhow::Context;
use ide_ssr::MatchFinder;
+use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
use project_model::{CargoConfig, RustLibSource};
-use crate::cli::{
- flags,
- load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice},
- Result,
-};
+use crate::cli::flags;
impl flags::Ssr {
- pub fn run(self) -> Result<()> {
+ pub fn run(self) -> anyhow::Result<()> {
use ide_db::base_db::SourceDatabaseExt;
let mut cargo_config = CargoConfig::default();
cargo_config.sysroot = Some(RustLibSource::Discover);
@@ -35,7 +33,8 @@ impl flags::Ssr {
if let Some(path) = vfs.file_path(file_id).as_path() {
let mut contents = db.file_text(file_id).to_string();
edit.apply(&mut contents);
- std::fs::write(path, contents)?;
+ std::fs::write(path, contents)
+ .with_context(|| format!("failed to write {path}"))?;
}
}
Ok(())
@@ -46,7 +45,7 @@ impl flags::Search {
/// Searches for `patterns`, printing debug information for any nodes whose text exactly matches
/// `debug_snippet`. This is intended for debugging and, in its current form, probably isn't useful
/// for much else.
- pub fn run(self) -> Result<()> {
+ pub fn run(self) -> anyhow::Result<()> {
use ide_db::base_db::SourceDatabaseExt;
use ide_db::symbol_index::SymbolsDatabase;
let cargo_config = CargoConfig::default();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index 6355c620f..fa20c796e 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -1079,6 +1079,7 @@ impl Config {
pub fn diagnostics(&self) -> DiagnosticsConfig {
DiagnosticsConfig {
+ enabled: self.data.diagnostics_enable,
proc_attr_macros_enabled: self.expand_proc_attr_macros(),
proc_macros_enabled: self.data.procMacro_enable,
disable_experimental: !self.data.diagnostics_experimental_enable,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs
index 4e57c6eb6..5e5cd9a02 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs
@@ -10,7 +10,7 @@ use crate::{
global_state::{GlobalState, GlobalStateSnapshot},
main_loop::Task,
version::version,
- LspError, Result,
+ LspError,
};
/// A visitor for routing a raw JSON request to an appropriate handler function.
@@ -32,13 +32,13 @@ pub(crate) struct RequestDispatcher<'a> {
pub(crate) global_state: &'a mut GlobalState,
}
-impl<'a> RequestDispatcher<'a> {
+impl RequestDispatcher<'_> {
/// Dispatches the request onto the current thread, given full access to
/// mutable global state. Unlike all other methods here, this one isn't
/// guarded by `catch_unwind`, so, please, don't make bugs :-)
pub(crate) fn on_sync_mut<R>(
&mut self,
- f: fn(&mut GlobalState, R::Params) -> Result<R::Result>,
+ f: fn(&mut GlobalState, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request,
@@ -63,7 +63,7 @@ impl<'a> RequestDispatcher<'a> {
/// Dispatches the request onto the current thread.
pub(crate) fn on_sync<R>(
&mut self,
- f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request,
@@ -92,7 +92,7 @@ impl<'a> RequestDispatcher<'a> {
/// without retrying it if it panics.
pub(crate) fn on_no_retry<R>(
&mut self,
- f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
@@ -128,7 +128,7 @@ impl<'a> RequestDispatcher<'a> {
/// Dispatches a non-latency-sensitive request onto the thread pool.
pub(crate) fn on<R>(
&mut self,
- f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
@@ -141,7 +141,7 @@ impl<'a> RequestDispatcher<'a> {
/// Dispatches a latency-sensitive request onto the thread pool.
pub(crate) fn on_latency_sensitive<R>(
&mut self,
- f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
@@ -156,7 +156,7 @@ impl<'a> RequestDispatcher<'a> {
/// We can't run this on the main thread though as we invoke rustfmt which may take arbitrary time to complete!
pub(crate) fn on_fmt_thread<R>(
&mut self,
- f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
@@ -181,7 +181,7 @@ impl<'a> RequestDispatcher<'a> {
fn on_with_thread_intent<const MAIN_POOL: bool, R>(
&mut self,
intent: ThreadIntent,
- f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
@@ -245,7 +245,7 @@ impl<'a> RequestDispatcher<'a> {
fn thread_result_to_response<R>(
id: lsp_server::RequestId,
- result: thread::Result<Result<R::Result>>,
+ result: thread::Result<anyhow::Result<R::Result>>,
) -> Result<lsp_server::Response, Cancelled>
where
R: lsp_types::request::Request,
@@ -277,7 +277,7 @@ where
fn result_to_response<R>(
id: lsp_server::RequestId,
- result: Result<R::Result>,
+ result: anyhow::Result<R::Result>,
) -> Result<lsp_server::Response, Cancelled>
where
R: lsp_types::request::Request,
@@ -289,7 +289,7 @@ where
Err(e) => match e.downcast::<LspError>() {
Ok(lsp_error) => lsp_server::Response::new_err(id, lsp_error.code, lsp_error.message),
Err(e) => match e.downcast::<Cancelled>() {
- Ok(cancelled) => return Err(*cancelled),
+ Ok(cancelled) => return Err(cancelled),
Err(e) => lsp_server::Response::new_err(
id,
lsp_server::ErrorCode::InternalError as i32,
@@ -306,11 +306,11 @@ pub(crate) struct NotificationDispatcher<'a> {
pub(crate) global_state: &'a mut GlobalState,
}
-impl<'a> NotificationDispatcher<'a> {
+impl NotificationDispatcher<'_> {
pub(crate) fn on_sync_mut<N>(
&mut self,
- f: fn(&mut GlobalState, N::Params) -> Result<()>,
- ) -> Result<&mut Self>
+ f: fn(&mut GlobalState, N::Params) -> anyhow::Result<()>,
+ ) -> anyhow::Result<&mut Self>
where
N: lsp_types::notification::Notification,
N::Params: DeserializeOwned + Send,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs
index cd74a5500..c247e1bb2 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs
@@ -14,19 +14,21 @@ use crate::{
line_index::{LineIndex, PositionEncoding},
lsp_ext,
lsp_utils::invalid_params_error,
- Result,
};
-pub(crate) fn abs_path(url: &lsp_types::Url) -> Result<AbsPathBuf> {
- let path = url.to_file_path().map_err(|()| "url is not a file")?;
+pub(crate) fn abs_path(url: &lsp_types::Url) -> anyhow::Result<AbsPathBuf> {
+ let path = url.to_file_path().map_err(|()| anyhow::format_err!("url is not a file"))?;
Ok(AbsPathBuf::try_from(path).unwrap())
}
-pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {
+pub(crate) fn vfs_path(url: &lsp_types::Url) -> anyhow::Result<vfs::VfsPath> {
abs_path(url).map(vfs::VfsPath::from)
}
-pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result<TextSize> {
+pub(crate) fn offset(
+ line_index: &LineIndex,
+ position: lsp_types::Position,
+) -> anyhow::Result<TextSize> {
let line_col = match line_index.encoding {
PositionEncoding::Utf8 => LineCol { line: position.line, col: position.character },
PositionEncoding::Wide(enc) => {
@@ -42,7 +44,10 @@ pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> R
Ok(text_size)
}
-pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> Result<TextRange> {
+pub(crate) fn text_range(
+ line_index: &LineIndex,
+ range: lsp_types::Range,
+) -> anyhow::Result<TextRange> {
let start = offset(line_index, range.start)?;
let end = offset(line_index, range.end)?;
match end < start {
@@ -51,14 +56,14 @@ pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> Res
}
}
-pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> Result<FileId> {
+pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> anyhow::Result<FileId> {
snap.url_to_file_id(url)
}
pub(crate) fn file_position(
snap: &GlobalStateSnapshot,
tdpp: lsp_types::TextDocumentPositionParams,
-) -> Result<FilePosition> {
+) -> anyhow::Result<FilePosition> {
let file_id = file_id(snap, &tdpp.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let offset = offset(&line_index, tdpp.position)?;
@@ -69,7 +74,7 @@ pub(crate) fn file_range(
snap: &GlobalStateSnapshot,
text_document_identifier: lsp_types::TextDocumentIdentifier,
range: lsp_types::Range,
-) -> Result<FileRange> {
+) -> anyhow::Result<FileRange> {
file_range_uri(snap, &text_document_identifier.uri, range)
}
@@ -77,7 +82,7 @@ pub(crate) fn file_range_uri(
snap: &GlobalStateSnapshot,
document: &lsp_types::Url,
range: lsp_types::Range,
-) -> Result<FileRange> {
+) -> anyhow::Result<FileRange> {
let file_id = file_id(snap, document)?;
let line_index = snap.file_line_index(file_id)?;
let range = text_range(&line_index, range)?;
@@ -101,7 +106,7 @@ pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind>
pub(crate) fn annotation(
snap: &GlobalStateSnapshot,
code_lens: lsp_types::CodeLens,
-) -> Result<Option<Annotation>> {
+) -> anyhow::Result<Option<Annotation>> {
let data =
code_lens.data.ok_or_else(|| invalid_params_error("code lens without data".to_string()))?;
let resolve = from_json::<lsp_ext::CodeLensResolveData>("CodeLensResolveData", &data)?;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
index d5b0e3a57..ea8a69751 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
@@ -9,6 +9,7 @@ use crossbeam_channel::{unbounded, Receiver, Sender};
use flycheck::FlycheckHandle;
use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
use ide_db::base_db::{CrateId, FileLoader, ProcMacroPaths, SourceDatabase};
+use load_cargo::SourceRootConfig;
use lsp_types::{SemanticTokens, Url};
use nohash_hasher::IntMap;
use parking_lot::{Mutex, RwLock};
@@ -27,10 +28,9 @@ use crate::{
main_loop::Task,
mem_docs::MemDocs,
op_queue::OpQueue,
- reload::{self, SourceRootConfig},
+ reload,
task_pool::TaskPool,
to_proto::url_from_abs_path,
- Result,
};
// Enforces drop order
@@ -319,7 +319,7 @@ impl GlobalState {
// crate see https://github.com/rust-lang/rust-analyzer/issues/13029
if let Some((path, force_crate_graph_reload)) = workspace_structure_change {
self.fetch_workspaces_queue.request_op(
- format!("workspace vfs file change: {}", path.display()),
+ format!("workspace vfs file change: {path}"),
force_crate_graph_reload,
);
}
@@ -422,7 +422,7 @@ impl Drop for GlobalState {
}
impl GlobalStateSnapshot {
- pub(crate) fn url_to_file_id(&self, url: &Url) -> Result<FileId> {
+ pub(crate) fn url_to_file_id(&self, url: &Url) -> anyhow::Result<FileId> {
url_to_file_id(&self.vfs.read().0, url)
}
@@ -481,8 +481,8 @@ pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {
url_from_abs_path(path)
}
-pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> {
+pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> anyhow::Result<FileId> {
let path = from_proto::vfs_path(url)?;
- let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {path}"))?;
+ let res = vfs.file_id(&path).ok_or_else(|| anyhow::format_err!("file not found: {path}"))?;
Ok(res)
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
index ae1dc2315..e830e5e9a 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
@@ -14,10 +14,10 @@ use vfs::{AbsPathBuf, ChangeKind, VfsPath};
use crate::{
config::Config, from_proto, global_state::GlobalState, lsp_ext::RunFlycheckParams,
- lsp_utils::apply_document_changes, mem_docs::DocumentData, reload, Result,
+ lsp_utils::apply_document_changes, mem_docs::DocumentData, reload,
};
-pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> Result<()> {
+pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> anyhow::Result<()> {
let id: lsp_server::RequestId = match params.id {
lsp_types::NumberOrString::Number(id) => id.into(),
lsp_types::NumberOrString::String(id) => id.into(),
@@ -29,7 +29,7 @@ pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> Re
pub(crate) fn handle_work_done_progress_cancel(
state: &mut GlobalState,
params: WorkDoneProgressCancelParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
if let lsp_types::NumberOrString::String(s) = &params.token {
if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") {
if let Ok(id) = u32::from_str_radix(id, 10) {
@@ -49,7 +49,7 @@ pub(crate) fn handle_work_done_progress_cancel(
pub(crate) fn handle_did_open_text_document(
state: &mut GlobalState,
params: DidOpenTextDocumentParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
let _p = profile::span("handle_did_open_text_document");
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
@@ -68,7 +68,7 @@ pub(crate) fn handle_did_open_text_document(
pub(crate) fn handle_did_change_text_document(
state: &mut GlobalState,
params: DidChangeTextDocumentParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
let _p = profile::span("handle_did_change_text_document");
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
@@ -100,7 +100,7 @@ pub(crate) fn handle_did_change_text_document(
pub(crate) fn handle_did_close_text_document(
state: &mut GlobalState,
params: DidCloseTextDocumentParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
let _p = profile::span("handle_did_close_text_document");
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
@@ -120,14 +120,14 @@ pub(crate) fn handle_did_close_text_document(
pub(crate) fn handle_did_save_text_document(
state: &mut GlobalState,
params: DidSaveTextDocumentParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
// Re-fetch workspaces if a workspace related file has changed
if let Some(abs_path) = vfs_path.as_path() {
if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) {
state
.fetch_workspaces_queue
- .request_op(format!("DidSaveTextDocument {}", abs_path.display()), false);
+ .request_op(format!("DidSaveTextDocument {abs_path}"), false);
}
}
@@ -146,7 +146,7 @@ pub(crate) fn handle_did_save_text_document(
pub(crate) fn handle_did_change_configuration(
state: &mut GlobalState,
_params: DidChangeConfigurationParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
// As stated in https://github.com/microsoft/language-server-protocol/issues/676,
// this notification's parameters should be ignored and the actual config queried separately.
state.send_request::<lsp_types::request::WorkspaceConfiguration>(
@@ -186,7 +186,7 @@ pub(crate) fn handle_did_change_configuration(
pub(crate) fn handle_did_change_workspace_folders(
state: &mut GlobalState,
params: DidChangeWorkspaceFoldersParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
let config = Arc::make_mut(&mut state.config);
for workspace in params.event.removed {
@@ -214,7 +214,7 @@ pub(crate) fn handle_did_change_workspace_folders(
pub(crate) fn handle_did_change_watched_files(
state: &mut GlobalState,
params: DidChangeWatchedFilesParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
for change in params.changes {
if let Ok(path) = from_proto::abs_path(&change.uri) {
state.loader.handle.invalidate(path);
@@ -302,13 +302,13 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
}
}
-pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> Result<()> {
+pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
let _p = profile::span("handle_stop_flycheck");
state.flycheck.iter().for_each(|flycheck| flycheck.cancel());
Ok(())
}
-pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> Result<()> {
+pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
let _p = profile::span("handle_clear_flycheck");
state.diagnostics.clear_check_all();
Ok(())
@@ -317,7 +317,7 @@ pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> Result<()
pub(crate) fn handle_run_flycheck(
state: &mut GlobalState,
params: RunFlycheckParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
let _p = profile::span("handle_run_flycheck");
if let Some(text_document) = params.text_document {
if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
index a6a72552d..5f1f731cf 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
@@ -8,6 +8,7 @@ use std::{
};
use anyhow::Context;
+
use ide::{
AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange,
HoverAction, HoverGotoTypeData, Query, RangeInfo, ReferenceCategory, Runnable, RunnableKind,
@@ -20,9 +21,9 @@ use lsp_types::{
CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
CodeLens, CompletionItem, FoldingRange, FoldingRangeParams, HoverContents, InlayHint,
InlayHintParams, Location, LocationLink, Position, PrepareRenameResponse, Range, RenameParams,
- SemanticTokensDeltaParams, SemanticTokensFullDeltaResult, SemanticTokensParams,
- SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
- SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
+ ResourceOp, ResourceOperationKind, SemanticTokensDeltaParams, SemanticTokensFullDeltaResult,
+ SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult,
+ SemanticTokensResult, SymbolInformation, SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
};
use project_model::{ManifestPath, ProjectWorkspace, TargetKind};
use serde_json::json;
@@ -33,7 +34,7 @@ use vfs::{AbsPath, AbsPathBuf, VfsPath};
use crate::{
cargo_target_spec::CargoTargetSpec,
- config::{RustfmtConfig, WorkspaceSymbolConfig},
+ config::{Config, RustfmtConfig, WorkspaceSymbolConfig},
diff::diff,
from_proto,
global_state::{GlobalState, GlobalStateSnapshot},
@@ -43,10 +44,10 @@ use crate::{
FetchDependencyListResult, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams,
},
lsp_utils::{all_edits_are_disjoint, invalid_params_error},
- to_proto, LspError, Result,
+ to_proto, LspError,
};
-pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> {
+pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
// FIXME: use `Arc::from_iter` when it becomes available
state.proc_macro_clients = Arc::from(Vec::new());
state.proc_macro_changed = false;
@@ -55,7 +56,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<
Ok(())
}
-pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> Result<()> {
+pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
// FIXME: use `Arc::from_iter` when it becomes available
state.proc_macro_clients = Arc::from(Vec::new());
state.proc_macro_changed = false;
@@ -67,7 +68,7 @@ pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> Resu
pub(crate) fn handle_analyzer_status(
snap: GlobalStateSnapshot,
params: lsp_ext::AnalyzerStatusParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_analyzer_status");
let mut buf = String::new();
@@ -112,7 +113,7 @@ pub(crate) fn handle_analyzer_status(
Ok(buf)
}
-pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result<String> {
+pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> anyhow::Result<String> {
let _p = profile::span("handle_memory_usage");
let mem = state.analysis_host.per_query_memory_usage();
@@ -125,7 +126,7 @@ pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result<Stri
Ok(out)
}
-pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> Result<()> {
+pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
state.analysis_host.shuffle_crate_graph();
Ok(())
}
@@ -133,7 +134,7 @@ pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> Resu
pub(crate) fn handle_syntax_tree(
snap: GlobalStateSnapshot,
params: lsp_ext::SyntaxTreeParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_syntax_tree");
let id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(id)?;
@@ -145,7 +146,7 @@ pub(crate) fn handle_syntax_tree(
pub(crate) fn handle_view_hir(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_view_hir");
let position = from_proto::file_position(&snap, params)?;
let res = snap.analysis.view_hir(position)?;
@@ -155,7 +156,7 @@ pub(crate) fn handle_view_hir(
pub(crate) fn handle_view_mir(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_view_mir");
let position = from_proto::file_position(&snap, params)?;
let res = snap.analysis.view_mir(position)?;
@@ -165,7 +166,7 @@ pub(crate) fn handle_view_mir(
pub(crate) fn handle_interpret_function(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_interpret_function");
let position = from_proto::file_position(&snap, params)?;
let res = snap.analysis.interpret_function(position)?;
@@ -175,7 +176,7 @@ pub(crate) fn handle_interpret_function(
pub(crate) fn handle_view_file_text(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentIdentifier,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let file_id = from_proto::file_id(&snap, &params.uri)?;
Ok(snap.analysis.file_text(file_id)?.to_string())
}
@@ -183,7 +184,7 @@ pub(crate) fn handle_view_file_text(
pub(crate) fn handle_view_item_tree(
snap: GlobalStateSnapshot,
params: lsp_ext::ViewItemTreeParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_view_item_tree");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let res = snap.analysis.view_item_tree(file_id)?;
@@ -193,16 +194,16 @@ pub(crate) fn handle_view_item_tree(
pub(crate) fn handle_view_crate_graph(
snap: GlobalStateSnapshot,
params: ViewCrateGraphParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_view_crate_graph");
- let dot = snap.analysis.view_crate_graph(params.full)??;
+ let dot = snap.analysis.view_crate_graph(params.full)?.map_err(anyhow::Error::msg)?;
Ok(dot)
}
pub(crate) fn handle_expand_macro(
snap: GlobalStateSnapshot,
params: lsp_ext::ExpandMacroParams,
-) -> Result<Option<lsp_ext::ExpandedMacro>> {
+) -> anyhow::Result<Option<lsp_ext::ExpandedMacro>> {
let _p = profile::span("handle_expand_macro");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
@@ -215,11 +216,11 @@ pub(crate) fn handle_expand_macro(
pub(crate) fn handle_selection_range(
snap: GlobalStateSnapshot,
params: lsp_types::SelectionRangeParams,
-) -> Result<Option<Vec<lsp_types::SelectionRange>>> {
+) -> anyhow::Result<Option<Vec<lsp_types::SelectionRange>>> {
let _p = profile::span("handle_selection_range");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
- let res: Result<Vec<lsp_types::SelectionRange>> = params
+ let res: anyhow::Result<Vec<lsp_types::SelectionRange>> = params
.positions
.into_iter()
.map(|position| {
@@ -258,7 +259,7 @@ pub(crate) fn handle_selection_range(
pub(crate) fn handle_matching_brace(
snap: GlobalStateSnapshot,
params: lsp_ext::MatchingBraceParams,
-) -> Result<Vec<Position>> {
+) -> anyhow::Result<Vec<Position>> {
let _p = profile::span("handle_matching_brace");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
@@ -281,7 +282,7 @@ pub(crate) fn handle_matching_brace(
pub(crate) fn handle_join_lines(
snap: GlobalStateSnapshot,
params: lsp_ext::JoinLinesParams,
-) -> Result<Vec<lsp_types::TextEdit>> {
+) -> anyhow::Result<Vec<lsp_types::TextEdit>> {
let _p = profile::span("handle_join_lines");
let config = snap.config.join_lines();
@@ -306,7 +307,7 @@ pub(crate) fn handle_join_lines(
pub(crate) fn handle_on_enter(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
+) -> anyhow::Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
let _p = profile::span("handle_on_enter");
let position = from_proto::file_position(&snap, params)?;
let edit = match snap.analysis.on_enter(position)? {
@@ -321,7 +322,7 @@ pub(crate) fn handle_on_enter(
pub(crate) fn handle_on_type_formatting(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentOnTypeFormattingParams,
-) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
+) -> anyhow::Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
let _p = profile::span("handle_on_type_formatting");
let mut position = from_proto::file_position(&snap, params.text_document_position)?;
let line_index = snap.file_line_index(position.file_id)?;
@@ -352,7 +353,8 @@ pub(crate) fn handle_on_type_formatting(
};
// This should be a single-file edit
- let (_, text_edit) = edit.source_file_edits.into_iter().next().unwrap();
+ let (_, (text_edit, snippet_edit)) = edit.source_file_edits.into_iter().next().unwrap();
+ stdx::never!(snippet_edit.is_some(), "on type formatting shouldn't use structured snippets");
let change = to_proto::snippet_text_edit_vec(&line_index, edit.is_snippet, text_edit);
Ok(Some(change))
@@ -361,7 +363,7 @@ pub(crate) fn handle_on_type_formatting(
pub(crate) fn handle_document_symbol(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentSymbolParams,
-) -> Result<Option<lsp_types::DocumentSymbolResponse>> {
+) -> anyhow::Result<Option<lsp_types::DocumentSymbolResponse>> {
let _p = profile::span("handle_document_symbol");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
@@ -450,7 +452,7 @@ pub(crate) fn handle_document_symbol(
pub(crate) fn handle_workspace_symbol(
snap: GlobalStateSnapshot,
params: WorkspaceSymbolParams,
-) -> Result<Option<Vec<SymbolInformation>>> {
+) -> anyhow::Result<Option<Vec<SymbolInformation>>> {
let _p = profile::span("handle_workspace_symbol");
let config = snap.config.workspace_symbol();
@@ -513,7 +515,10 @@ pub(crate) fn handle_workspace_symbol(
(all_symbols, libs)
}
- fn exec_query(snap: &GlobalStateSnapshot, query: Query) -> Result<Vec<SymbolInformation>> {
+ fn exec_query(
+ snap: &GlobalStateSnapshot,
+ query: Query,
+ ) -> anyhow::Result<Vec<SymbolInformation>> {
let mut res = Vec::new();
for nav in snap.analysis.symbol_search(query)? {
let container_name = nav.container_name.as_ref().map(|v| v.to_string());
@@ -542,7 +547,7 @@ pub(crate) fn handle_workspace_symbol(
pub(crate) fn handle_will_rename_files(
snap: GlobalStateSnapshot,
params: lsp_types::RenameFilesParams,
-) -> Result<Option<lsp_types::WorkspaceEdit>> {
+) -> anyhow::Result<Option<lsp_types::WorkspaceEdit>> {
let _p = profile::span("handle_will_rename_files");
let source_changes: Vec<SourceChange> = params
@@ -604,7 +609,7 @@ pub(crate) fn handle_will_rename_files(
pub(crate) fn handle_goto_definition(
snap: GlobalStateSnapshot,
params: lsp_types::GotoDefinitionParams,
-) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = profile::span("handle_goto_definition");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_definition(position)? {
@@ -619,7 +624,7 @@ pub(crate) fn handle_goto_definition(
pub(crate) fn handle_goto_declaration(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoDeclarationParams,
-) -> Result<Option<lsp_types::request::GotoDeclarationResponse>> {
+) -> anyhow::Result<Option<lsp_types::request::GotoDeclarationResponse>> {
let _p = profile::span("handle_goto_declaration");
let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?;
let nav_info = match snap.analysis.goto_declaration(position)? {
@@ -634,7 +639,7 @@ pub(crate) fn handle_goto_declaration(
pub(crate) fn handle_goto_implementation(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoImplementationParams,
-) -> Result<Option<lsp_types::request::GotoImplementationResponse>> {
+) -> anyhow::Result<Option<lsp_types::request::GotoImplementationResponse>> {
let _p = profile::span("handle_goto_implementation");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_implementation(position)? {
@@ -649,7 +654,7 @@ pub(crate) fn handle_goto_implementation(
pub(crate) fn handle_goto_type_definition(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoTypeDefinitionParams,
-) -> Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
+) -> anyhow::Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
let _p = profile::span("handle_goto_type_definition");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_type_definition(position)? {
@@ -664,7 +669,7 @@ pub(crate) fn handle_goto_type_definition(
pub(crate) fn handle_parent_module(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = profile::span("handle_parent_module");
if let Ok(file_path) = &params.text_document.uri.to_file_path() {
if file_path.file_name().unwrap_or_default() == "Cargo.toml" {
@@ -731,7 +736,7 @@ pub(crate) fn handle_parent_module(
pub(crate) fn handle_runnables(
snap: GlobalStateSnapshot,
params: lsp_ext::RunnablesParams,
-) -> Result<Vec<lsp_ext::Runnable>> {
+) -> anyhow::Result<Vec<lsp_ext::Runnable>> {
let _p = profile::span("handle_runnables");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
@@ -826,7 +831,7 @@ fn should_skip_for_offset(runnable: &Runnable, offset: Option<TextSize>) -> bool
pub(crate) fn handle_related_tests(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<Vec<lsp_ext::TestInfo>> {
+) -> anyhow::Result<Vec<lsp_ext::TestInfo>> {
let _p = profile::span("handle_related_tests");
let position = from_proto::file_position(&snap, params)?;
@@ -844,7 +849,7 @@ pub(crate) fn handle_related_tests(
pub(crate) fn handle_completion(
snap: GlobalStateSnapshot,
params: lsp_types::CompletionParams,
-) -> Result<Option<lsp_types::CompletionResponse>> {
+) -> anyhow::Result<Option<lsp_types::CompletionResponse>> {
let _p = profile::span("handle_completion");
let text_document_position = params.text_document_position.clone();
let position = from_proto::file_position(&snap, params.text_document_position)?;
@@ -872,7 +877,7 @@ pub(crate) fn handle_completion(
pub(crate) fn handle_completion_resolve(
snap: GlobalStateSnapshot,
mut original_completion: CompletionItem,
-) -> Result<CompletionItem> {
+) -> anyhow::Result<CompletionItem> {
let _p = profile::span("handle_completion_resolve");
if !all_edits_are_disjoint(&original_completion, &[]) {
@@ -928,7 +933,7 @@ pub(crate) fn handle_completion_resolve(
pub(crate) fn handle_folding_range(
snap: GlobalStateSnapshot,
params: FoldingRangeParams,
-) -> Result<Option<Vec<FoldingRange>>> {
+) -> anyhow::Result<Option<Vec<FoldingRange>>> {
let _p = profile::span("handle_folding_range");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let folds = snap.analysis.folding_ranges(file_id)?;
@@ -945,7 +950,7 @@ pub(crate) fn handle_folding_range(
pub(crate) fn handle_signature_help(
snap: GlobalStateSnapshot,
params: lsp_types::SignatureHelpParams,
-) -> Result<Option<lsp_types::SignatureHelp>> {
+) -> anyhow::Result<Option<lsp_types::SignatureHelp>> {
let _p = profile::span("handle_signature_help");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let help = match snap.analysis.signature_help(position)? {
@@ -960,7 +965,7 @@ pub(crate) fn handle_signature_help(
pub(crate) fn handle_hover(
snap: GlobalStateSnapshot,
params: lsp_ext::HoverParams,
-) -> Result<Option<lsp_ext::Hover>> {
+) -> anyhow::Result<Option<lsp_ext::Hover>> {
let _p = profile::span("handle_hover");
let range = match params.position {
PositionOrRange::Position(position) => Range::new(position, position),
@@ -997,7 +1002,7 @@ pub(crate) fn handle_hover(
pub(crate) fn handle_prepare_rename(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<Option<PrepareRenameResponse>> {
+) -> anyhow::Result<Option<PrepareRenameResponse>> {
let _p = profile::span("handle_prepare_rename");
let position = from_proto::file_position(&snap, params)?;
@@ -1011,7 +1016,7 @@ pub(crate) fn handle_prepare_rename(
pub(crate) fn handle_rename(
snap: GlobalStateSnapshot,
params: RenameParams,
-) -> Result<Option<WorkspaceEdit>> {
+) -> anyhow::Result<Option<WorkspaceEdit>> {
let _p = profile::span("handle_rename");
let position = from_proto::file_position(&snap, params.text_document_position)?;
@@ -1027,14 +1032,30 @@ pub(crate) fn handle_rename(
if !change.file_system_edits.is_empty() && snap.config.will_rename() {
change.source_file_edits.clear();
}
+
let workspace_edit = to_proto::workspace_edit(&snap, change)?;
+
+ if let Some(lsp_types::DocumentChanges::Operations(ops)) =
+ workspace_edit.document_changes.as_ref()
+ {
+ for op in ops {
+ if let lsp_types::DocumentChangeOperation::Op(doc_change_op) = op {
+ if let Err(err) =
+ resource_ops_supported(&snap.config, resolve_resource_op(doc_change_op))
+ {
+ return Err(err);
+ }
+ }
+ }
+ }
+
Ok(Some(workspace_edit))
}
pub(crate) fn handle_references(
snap: GlobalStateSnapshot,
params: lsp_types::ReferenceParams,
-) -> Result<Option<Vec<Location>>> {
+) -> anyhow::Result<Option<Vec<Location>>> {
let _p = profile::span("handle_references");
let position = from_proto::file_position(&snap, params.text_document_position)?;
@@ -1077,7 +1098,7 @@ pub(crate) fn handle_references(
pub(crate) fn handle_formatting(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentFormattingParams,
-) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> {
let _p = profile::span("handle_formatting");
run_rustfmt(&snap, params.text_document, None)
@@ -1086,7 +1107,7 @@ pub(crate) fn handle_formatting(
pub(crate) fn handle_range_formatting(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentRangeFormattingParams,
-) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> {
let _p = profile::span("handle_range_formatting");
run_rustfmt(&snap, params.text_document, Some(params.range))
@@ -1095,7 +1116,7 @@ pub(crate) fn handle_range_formatting(
pub(crate) fn handle_code_action(
snap: GlobalStateSnapshot,
params: lsp_types::CodeActionParams,
-) -> Result<Option<Vec<lsp_ext::CodeAction>>> {
+) -> anyhow::Result<Option<Vec<lsp_ext::CodeAction>>> {
let _p = profile::span("handle_code_action");
if !snap.config.code_action_literals() {
@@ -1134,6 +1155,21 @@ pub(crate) fn handle_code_action(
let resolve_data =
if code_action_resolve_cap { Some((index, params.clone())) } else { None };
let code_action = to_proto::code_action(&snap, assist, resolve_data)?;
+
+ // Check if the client supports the necessary `ResourceOperation`s.
+ let changes = code_action.edit.as_ref().and_then(|it| it.document_changes.as_ref());
+ if let Some(changes) = changes {
+ for change in changes {
+ if let lsp_ext::SnippetDocumentChangeOperation::Op(res_op) = change {
+ if let Err(err) =
+ resource_ops_supported(&snap.config, resolve_resource_op(res_op))
+ {
+ return Err(err);
+ }
+ }
+ }
+ }
+
res.push(code_action)
}
@@ -1158,7 +1194,7 @@ pub(crate) fn handle_code_action(
pub(crate) fn handle_code_action_resolve(
snap: GlobalStateSnapshot,
mut code_action: lsp_ext::CodeAction,
-) -> Result<lsp_ext::CodeAction> {
+) -> anyhow::Result<lsp_ext::CodeAction> {
let _p = profile::span("handle_code_action_resolve");
let params = match code_action.data.take() {
Some(it) => it,
@@ -1216,10 +1252,25 @@ pub(crate) fn handle_code_action_resolve(
let ca = to_proto::code_action(&snap, assist.clone(), None)?;
code_action.edit = ca.edit;
code_action.command = ca.command;
+
+ if let Some(edit) = code_action.edit.as_ref() {
+ if let Some(changes) = edit.document_changes.as_ref() {
+ for change in changes {
+ if let lsp_ext::SnippetDocumentChangeOperation::Op(res_op) = change {
+ if let Err(err) =
+ resource_ops_supported(&snap.config, resolve_resource_op(res_op))
+ {
+ return Err(err);
+ }
+ }
+ }
+ }
+ }
+
Ok(code_action)
}
-fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> {
+fn parse_action_id(action_id: &str) -> anyhow::Result<(usize, SingleResolve), String> {
let id_parts = action_id.split(':').collect::<Vec<_>>();
match id_parts.as_slice() {
[assist_id_string, assist_kind_string, index_string] => {
@@ -1237,7 +1288,7 @@ fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> {
pub(crate) fn handle_code_lens(
snap: GlobalStateSnapshot,
params: lsp_types::CodeLensParams,
-) -> Result<Option<Vec<CodeLens>>> {
+) -> anyhow::Result<Option<Vec<CodeLens>>> {
let _p = profile::span("handle_code_lens");
let lens_config = snap.config.lens();
@@ -1280,8 +1331,10 @@ pub(crate) fn handle_code_lens(
pub(crate) fn handle_code_lens_resolve(
snap: GlobalStateSnapshot,
code_lens: CodeLens,
-) -> Result<CodeLens> {
- let Some(annotation) = from_proto::annotation(&snap, code_lens.clone())? else { return Ok(code_lens) };
+) -> anyhow::Result<CodeLens> {
+ let Some(annotation) = from_proto::annotation(&snap, code_lens.clone())? else {
+ return Ok(code_lens);
+ };
let annotation = snap.analysis.resolve_annotation(annotation)?;
let mut acc = Vec::new();
@@ -1301,7 +1354,7 @@ pub(crate) fn handle_code_lens_resolve(
pub(crate) fn handle_document_highlight(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentHighlightParams,
-) -> Result<Option<Vec<lsp_types::DocumentHighlight>>> {
+) -> anyhow::Result<Option<Vec<lsp_types::DocumentHighlight>>> {
let _p = profile::span("handle_document_highlight");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let line_index = snap.file_line_index(position.file_id)?;
@@ -1323,7 +1376,7 @@ pub(crate) fn handle_document_highlight(
pub(crate) fn handle_ssr(
snap: GlobalStateSnapshot,
params: lsp_ext::SsrParams,
-) -> Result<lsp_types::WorkspaceEdit> {
+) -> anyhow::Result<lsp_types::WorkspaceEdit> {
let _p = profile::span("handle_ssr");
let selections = params
.selections
@@ -1343,7 +1396,7 @@ pub(crate) fn handle_ssr(
pub(crate) fn handle_inlay_hints(
snap: GlobalStateSnapshot,
params: InlayHintParams,
-) -> Result<Option<Vec<InlayHint>>> {
+) -> anyhow::Result<Option<Vec<InlayHint>>> {
let _p = profile::span("handle_inlay_hints");
let document_uri = &params.text_document.uri;
let FileRange { file_id, range } = from_proto::file_range(
@@ -1365,7 +1418,7 @@ pub(crate) fn handle_inlay_hints(
pub(crate) fn handle_inlay_hints_resolve(
_snap: GlobalStateSnapshot,
hint: InlayHint,
-) -> Result<InlayHint> {
+) -> anyhow::Result<InlayHint> {
let _p = profile::span("handle_inlay_hints_resolve");
Ok(hint)
}
@@ -1373,7 +1426,7 @@ pub(crate) fn handle_inlay_hints_resolve(
pub(crate) fn handle_call_hierarchy_prepare(
snap: GlobalStateSnapshot,
params: CallHierarchyPrepareParams,
-) -> Result<Option<Vec<CallHierarchyItem>>> {
+) -> anyhow::Result<Option<Vec<CallHierarchyItem>>> {
let _p = profile::span("handle_call_hierarchy_prepare");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
@@ -1395,7 +1448,7 @@ pub(crate) fn handle_call_hierarchy_prepare(
pub(crate) fn handle_call_hierarchy_incoming(
snap: GlobalStateSnapshot,
params: CallHierarchyIncomingCallsParams,
-) -> Result<Option<Vec<CallHierarchyIncomingCall>>> {
+) -> anyhow::Result<Option<Vec<CallHierarchyIncomingCall>>> {
let _p = profile::span("handle_call_hierarchy_incoming");
let item = params.item;
@@ -1430,7 +1483,7 @@ pub(crate) fn handle_call_hierarchy_incoming(
pub(crate) fn handle_call_hierarchy_outgoing(
snap: GlobalStateSnapshot,
params: CallHierarchyOutgoingCallsParams,
-) -> Result<Option<Vec<CallHierarchyOutgoingCall>>> {
+) -> anyhow::Result<Option<Vec<CallHierarchyOutgoingCall>>> {
let _p = profile::span("handle_call_hierarchy_outgoing");
let item = params.item;
@@ -1465,7 +1518,7 @@ pub(crate) fn handle_call_hierarchy_outgoing(
pub(crate) fn handle_semantic_tokens_full(
snap: GlobalStateSnapshot,
params: SemanticTokensParams,
-) -> Result<Option<SemanticTokensResult>> {
+) -> anyhow::Result<Option<SemanticTokensResult>> {
let _p = profile::span("handle_semantic_tokens_full");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
@@ -1495,7 +1548,7 @@ pub(crate) fn handle_semantic_tokens_full(
pub(crate) fn handle_semantic_tokens_full_delta(
snap: GlobalStateSnapshot,
params: SemanticTokensDeltaParams,
-) -> Result<Option<SemanticTokensFullDeltaResult>> {
+) -> anyhow::Result<Option<SemanticTokensFullDeltaResult>> {
let _p = profile::span("handle_semantic_tokens_full_delta");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
@@ -1535,7 +1588,7 @@ pub(crate) fn handle_semantic_tokens_full_delta(
pub(crate) fn handle_semantic_tokens_range(
snap: GlobalStateSnapshot,
params: SemanticTokensRangeParams,
-) -> Result<Option<SemanticTokensRangeResult>> {
+) -> anyhow::Result<Option<SemanticTokensRangeResult>> {
let _p = profile::span("handle_semantic_tokens_range");
let frange = from_proto::file_range(&snap, params.text_document, params.range)?;
@@ -1561,7 +1614,7 @@ pub(crate) fn handle_semantic_tokens_range(
pub(crate) fn handle_open_docs(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<ExternalDocsResponse> {
+) -> anyhow::Result<ExternalDocsResponse> {
let _p = profile::span("handle_open_docs");
let position = from_proto::file_position(&snap, params)?;
@@ -1582,9 +1635,9 @@ pub(crate) fn handle_open_docs(
let Ok(remote_urls) = snap.analysis.external_docs(position, target_dir, sysroot) else {
return if snap.config.local_docs() {
Ok(ExternalDocsResponse::WithLocal(Default::default()))
- } else {
+ } else {
Ok(ExternalDocsResponse::Simple(None))
- }
+ };
};
let web = remote_urls.web_url.and_then(|it| Url::parse(&it).ok());
@@ -1600,7 +1653,7 @@ pub(crate) fn handle_open_docs(
pub(crate) fn handle_open_cargo_toml(
snap: GlobalStateSnapshot,
params: lsp_ext::OpenCargoTomlParams,
-) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = profile::span("handle_open_cargo_toml");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
@@ -1618,7 +1671,7 @@ pub(crate) fn handle_open_cargo_toml(
pub(crate) fn handle_move_item(
snap: GlobalStateSnapshot,
params: lsp_ext::MoveItemParams,
-) -> Result<Vec<lsp_ext::SnippetTextEdit>> {
+) -> anyhow::Result<Vec<lsp_ext::SnippetTextEdit>> {
let _p = profile::span("handle_move_item");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let range = from_proto::file_range(&snap, params.text_document, params.range)?;
@@ -1637,6 +1690,34 @@ pub(crate) fn handle_move_item(
}
}
+pub(crate) fn handle_view_recursive_memory_layout(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> anyhow::Result<Option<lsp_ext::RecursiveMemoryLayout>> {
+ let _p = profile::span("view_recursive_memory_layout");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let offset = from_proto::offset(&line_index, params.position)?;
+
+ let res = snap.analysis.get_recursive_memory_layout(FilePosition { file_id, offset })?;
+ Ok(res.map(|it| lsp_ext::RecursiveMemoryLayout {
+ nodes: it
+ .nodes
+ .iter()
+ .map(|n| lsp_ext::MemoryLayoutNode {
+ item_name: n.item_name.clone(),
+ typename: n.typename.clone(),
+ size: n.size,
+ offset: n.offset,
+ alignment: n.alignment,
+ parent_idx: n.parent_idx,
+ children_start: n.children_start,
+ children_len: n.children_len,
+ })
+ .collect(),
+ }))
+}
+
fn to_command_link(command: lsp_types::Command, tooltip: String) -> lsp_ext::CommandLink {
lsp_ext::CommandLink { tooltip: Some(tooltip), command }
}
@@ -1792,7 +1873,7 @@ fn run_rustfmt(
snap: &GlobalStateSnapshot,
text_document: TextDocumentIdentifier,
range: Option<lsp_types::Range>,
-) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> {
let file_id = from_proto::file_id(snap, &text_document.uri)?;
let file = snap.analysis.file_text(file_id)?;
@@ -1942,7 +2023,7 @@ fn run_rustfmt(
pub(crate) fn fetch_dependency_list(
state: GlobalStateSnapshot,
_params: FetchDependencyListParams,
-) -> Result<FetchDependencyListResult> {
+) -> anyhow::Result<FetchDependencyListResult> {
let crates = state.analysis.fetch_crates()?;
let crate_infos = crates
.into_iter()
@@ -1987,3 +2068,42 @@ fn to_url(path: VfsPath) -> Option<Url> {
let str_path = path.as_os_str().to_str()?;
Url::from_file_path(str_path).ok()
}
+
+fn resource_ops_supported(config: &Config, kind: ResourceOperationKind) -> anyhow::Result<()> {
+ #[rustfmt::skip]
+ let resops = (|| {
+ config
+ .caps()
+ .workspace
+ .as_ref()?
+ .workspace_edit
+ .as_ref()?
+ .resource_operations
+ .as_ref()
+ })();
+
+ if !matches!(resops, Some(resops) if resops.contains(&kind)) {
+ return Err(LspError::new(
+ ErrorCode::RequestFailed as i32,
+ format!(
+ "Client does not support {} capability.",
+ match kind {
+ ResourceOperationKind::Create => "create",
+ ResourceOperationKind::Rename => "rename",
+ ResourceOperationKind::Delete => "delete",
+ }
+ ),
+ )
+ .into());
+ }
+
+ Ok(())
+}
+
+fn resolve_resource_op(op: &ResourceOp) -> ResourceOperationKind {
+ match op {
+ ResourceOp::Create(_) => ResourceOperationKind::Create,
+ ResourceOp::Rename(_) => ResourceOperationKind::Rename,
+ ResourceOp::Delete(_) => ResourceOperationKind::Delete,
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
index bd9f471a4..5a11012b9 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -20,7 +20,7 @@ use test_utils::project_root;
use triomphe::Arc;
use vfs::{AbsPathBuf, VfsPath};
-use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
+use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
#[test]
fn integrated_highlighting_benchmark() {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
index 65de4366e..57e26c241 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
@@ -54,13 +54,12 @@ use serde::de::DeserializeOwned;
pub use crate::{caps::server_capabilities, main_loop::main_loop, version::version};
-pub type Error = Box<dyn std::error::Error + Send + Sync>;
-pub type Result<T, E = Error> = std::result::Result<T, E>;
-
-pub fn from_json<T: DeserializeOwned>(what: &'static str, json: &serde_json::Value) -> Result<T> {
- let res = serde_json::from_value(json.clone())
- .map_err(|e| format!("Failed to deserialize {what}: {e}; {json}"))?;
- Ok(res)
+pub fn from_json<T: DeserializeOwned>(
+ what: &'static str,
+ json: &serde_json::Value,
+) -> anyhow::Result<T> {
+ serde_json::from_value(json.clone())
+ .map_err(|e| anyhow::format_err!("Failed to deserialize {what}: {e}; {json}"))
}
#[derive(Debug)]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
index 4d67c8b30..d0989b323 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
@@ -182,6 +182,33 @@ pub struct ExpandedMacro {
pub expansion: String,
}
+pub enum ViewRecursiveMemoryLayout {}
+
+impl Request for ViewRecursiveMemoryLayout {
+ type Params = lsp_types::TextDocumentPositionParams;
+ type Result = Option<RecursiveMemoryLayout>;
+ const METHOD: &'static str = "rust-analyzer/viewRecursiveMemoryLayout";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct RecursiveMemoryLayout {
+ pub nodes: Vec<MemoryLayoutNode>,
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct MemoryLayoutNode {
+ pub item_name: String,
+ pub typename: String,
+ pub size: u64,
+ pub offset: u64,
+ pub alignment: u64,
+ pub parent_idx: i64,
+ pub children_start: i64,
+ pub children_len: u64,
+}
+
pub enum CancelFlycheck {}
impl Notification for CancelFlycheck {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
index 02dd94e5f..74036710f 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -23,10 +23,9 @@ use crate::{
lsp_ext,
lsp_utils::{notification_is, Progress},
reload::{BuildDataProgress, ProcMacroProgress, ProjectWorkspaceProgress},
- Result,
};
-pub fn main_loop(config: Config, connection: Connection) -> Result<()> {
+pub fn main_loop(config: Config, connection: Connection) -> anyhow::Result<()> {
tracing::info!("initial config: {:#?}", config);
// Windows scheduler implements priority boosts: if thread waits for an
@@ -109,7 +108,7 @@ impl fmt::Debug for Event {
}
impl GlobalState {
- fn run(mut self, inbox: Receiver<lsp_server::Message>) -> Result<()> {
+ fn run(mut self, inbox: Receiver<lsp_server::Message>) -> anyhow::Result<()> {
self.update_status_or_notify();
if self.config.did_save_text_document_dynamic_registration() {
@@ -134,7 +133,7 @@ impl GlobalState {
self.handle_event(event)?;
}
- Err("client exited without proper shutdown sequence".into())
+ anyhow::bail!("client exited without proper shutdown sequence")
}
fn register_did_save_capability(&mut self) {
@@ -191,7 +190,7 @@ impl GlobalState {
}
}
- fn handle_event(&mut self, event: Event) -> Result<()> {
+ fn handle_event(&mut self, event: Event) -> anyhow::Result<()> {
let loop_start = Instant::now();
// NOTE: don't count blocking select! call as a loop-turn time
let _p = profile::span("GlobalState::handle_event");
@@ -754,11 +753,12 @@ impl GlobalState {
)
.on::<lsp_types::request::WillRenameFiles>(handlers::handle_will_rename_files)
.on::<lsp_ext::Ssr>(handlers::handle_ssr)
+ .on::<lsp_ext::ViewRecursiveMemoryLayout>(handlers::handle_view_recursive_memory_layout)
.finish();
}
/// Handles an incoming notification.
- fn on_notification(&mut self, not: Notification) -> Result<()> {
+ fn on_notification(&mut self, not: Notification) -> anyhow::Result<()> {
use crate::handlers::notification as handlers;
use lsp_types::notification as notifs;
@@ -843,11 +843,7 @@ impl GlobalState {
d.code.as_str().to_string(),
)),
code_description: Some(lsp_types::CodeDescription {
- href: lsp_types::Url::parse(&format!(
- "https://rust-analyzer.github.io/manual.html#{}",
- d.code.as_str()
- ))
- .unwrap(),
+ href: lsp_types::Url::parse(&d.code.url()).unwrap(),
}),
source: Some("rust-analyzer".to_string()),
message: d.message,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
index 310c6b076..0a2bb8224 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
@@ -12,26 +12,22 @@
//! correct. Instead, we try to provide a best-effort service. Even if the
//! project is currently loading and we don't have a full project model, we
//! still want to respond to various requests.
-use std::{collections::hash_map::Entry, iter, mem, sync};
+use std::{iter, mem};
use flycheck::{FlycheckConfig, FlycheckHandle};
use hir::db::DefDatabase;
use ide::Change;
use ide_db::{
- base_db::{
- salsa::Durability, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
- ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros, SourceRoot, VfsPath,
- },
+ base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, ProcMacros},
FxHashMap,
};
-use itertools::Itertools;
-use proc_macro_api::{MacroDylib, ProcMacroServer};
-use project_model::{PackageRoot, ProjectWorkspace, WorkspaceBuildScripts};
+use load_cargo::{load_proc_macro, ProjectFolders};
+use proc_macro_api::ProcMacroServer;
+use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
use rustc_hash::FxHashSet;
use stdx::{format_to, thread::ThreadIntent};
-use syntax::SmolStr;
use triomphe::Arc;
-use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
+use vfs::{AbsPath, ChangeKind};
use crate::{
config::{Config, FilesWatcher, LinkedProject},
@@ -41,8 +37,6 @@ use crate::{
op_queue::Cause,
};
-use ::tt::token_id as tt;
-
#[derive(Debug)]
pub(crate) enum ProjectWorkspaceProgress {
Begin,
@@ -120,6 +114,11 @@ impl GlobalState {
if self.proc_macro_clients.iter().any(|it| it.is_err()) {
status.health = lsp_ext::Health::Warning;
message.push_str("Failed to spawn one or more proc-macro servers.\n\n");
+ for err in self.proc_macro_clients.iter() {
+ if let Err(err) = err {
+ format_to!(message, "- {err}\n");
+ }
+ }
}
if !self.config.cargo_autoreload()
&& self.is_quiescent()
@@ -307,7 +306,7 @@ impl GlobalState {
res.map_or_else(
|_| Err("proc macro crate is missing dylib".to_owned()),
|(crate_name, path)| {
- progress(path.display().to_string());
+ progress(path.to_string());
client.as_ref().map_err(Clone::clone).and_then(|client| {
load_proc_macro(
client,
@@ -340,7 +339,11 @@ impl GlobalState {
let _p = profile::span("GlobalState::switch_workspaces");
tracing::info!(%cause, "will switch workspaces");
- let Some((workspaces, force_reload_crate_graph)) = self.fetch_workspaces_queue.last_op_result() else { return; };
+ let Some((workspaces, force_reload_crate_graph)) =
+ self.fetch_workspaces_queue.last_op_result()
+ else {
+ return;
+ };
if let Err(_) = self.fetch_workspace_error() {
if !self.workspaces.is_empty() {
@@ -407,9 +410,9 @@ impl GlobalState {
.flat_map(|root| {
root.include.into_iter().flat_map(|it| {
[
- format!("{}/**/*.rs", it.display()),
- format!("{}/**/Cargo.toml", it.display()),
- format!("{}/**/Cargo.lock", it.display()),
+ format!("{it}/**/*.rs"),
+ format!("{it}/**/Cargo.toml"),
+ format!("{it}/**/Cargo.lock"),
]
})
})
@@ -447,17 +450,13 @@ impl GlobalState {
None => ws.find_sysroot_proc_macro_srv()?,
};
- tracing::info!("Using proc-macro server at {}", path.display(),);
+ tracing::info!("Using proc-macro server at {path}");
ProcMacroServer::spawn(path.clone()).map_err(|err| {
tracing::error!(
- "Failed to run proc-macro server from path {}, error: {:?}",
- path.display(),
- err
+ "Failed to run proc-macro server from path {path}, error: {err:?}",
);
- anyhow::anyhow!(
- "Failed to run proc-macro server from path {}, error: {:?}",
- path.display(),
- err
+ anyhow::format_err!(
+ "Failed to run proc-macro server from path {path}, error: {err:?}",
)
})
})
@@ -534,7 +533,9 @@ impl GlobalState {
pub(super) fn fetch_workspace_error(&self) -> Result<(), String> {
let mut buf = String::new();
- let Some((last_op_result, _)) = self.fetch_workspaces_queue.last_op_result() else { return Ok(()) };
+ let Some((last_op_result, _)) = self.fetch_workspaces_queue.last_op_result() else {
+ return Ok(());
+ };
if last_op_result.is_empty() {
stdx::format_to!(buf, "rust-analyzer failed to discover workspace");
} else {
@@ -621,254 +622,6 @@ impl GlobalState {
}
}
-#[derive(Default)]
-pub(crate) struct ProjectFolders {
- pub(crate) load: Vec<vfs::loader::Entry>,
- pub(crate) watch: Vec<usize>,
- pub(crate) source_root_config: SourceRootConfig,
-}
-
-impl ProjectFolders {
- pub(crate) fn new(
- workspaces: &[ProjectWorkspace],
- global_excludes: &[AbsPathBuf],
- ) -> ProjectFolders {
- let mut res = ProjectFolders::default();
- let mut fsc = FileSetConfig::builder();
- let mut local_filesets = vec![];
-
- // Dedup source roots
- // Depending on the project setup, we can have duplicated source roots, or for example in
- // the case of the rustc workspace, we can end up with two source roots that are almost the
- // same but not quite, like:
- // PackageRoot { is_local: false, include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri")], exclude: [] }
- // PackageRoot {
- // is_local: true,
- // include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri"), AbsPathBuf(".../rust/build/x86_64-pc-windows-msvc/stage0-tools/x86_64-pc-windows-msvc/release/build/cargo-miri-85801cd3d2d1dae4/out")],
- // exclude: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri/.git"), AbsPathBuf(".../rust/src/tools/miri/cargo-miri/target")]
- // }
- //
- // The first one comes from the explicit rustc workspace which points to the rustc workspace itself
- // The second comes from the rustc workspace that we load as the actual project workspace
- // These `is_local` differing in this kind of way gives us problems, especially when trying to filter diagnostics as we don't report diagnostics for external libraries.
- // So we need to deduplicate these, usually it would be enough to deduplicate by `include`, but as the rustc example shows here that doesn't work,
- // so we need to also coalesce the includes if they overlap.
-
- let mut roots: Vec<_> = workspaces
- .iter()
- .flat_map(|ws| ws.to_roots())
- .update(|root| root.include.sort())
- .sorted_by(|a, b| a.include.cmp(&b.include))
- .collect();
-
- // map that tracks indices of overlapping roots
- let mut overlap_map = FxHashMap::<_, Vec<_>>::default();
- let mut done = false;
-
- while !mem::replace(&mut done, true) {
- // maps include paths to indices of the corresponding root
- let mut include_to_idx = FxHashMap::default();
- // Find and note down the indices of overlapping roots
- for (idx, root) in roots.iter().enumerate().filter(|(_, it)| !it.include.is_empty()) {
- for include in &root.include {
- match include_to_idx.entry(include) {
- Entry::Occupied(e) => {
- overlap_map.entry(*e.get()).or_default().push(idx);
- }
- Entry::Vacant(e) => {
- e.insert(idx);
- }
- }
- }
- }
- for (k, v) in overlap_map.drain() {
- done = false;
- for v in v {
- let r = mem::replace(
- &mut roots[v],
- PackageRoot { is_local: false, include: vec![], exclude: vec![] },
- );
- roots[k].is_local |= r.is_local;
- roots[k].include.extend(r.include);
- roots[k].exclude.extend(r.exclude);
- }
- roots[k].include.sort();
- roots[k].exclude.sort();
- roots[k].include.dedup();
- roots[k].exclude.dedup();
- }
- }
-
- for root in roots.into_iter().filter(|it| !it.include.is_empty()) {
- let file_set_roots: Vec<VfsPath> =
- root.include.iter().cloned().map(VfsPath::from).collect();
-
- let entry = {
- let mut dirs = vfs::loader::Directories::default();
- dirs.extensions.push("rs".into());
- dirs.include.extend(root.include);
- dirs.exclude.extend(root.exclude);
- for excl in global_excludes {
- if dirs
- .include
- .iter()
- .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
- {
- dirs.exclude.push(excl.clone());
- }
- }
-
- vfs::loader::Entry::Directories(dirs)
- };
-
- if root.is_local {
- res.watch.push(res.load.len());
- }
- res.load.push(entry);
-
- if root.is_local {
- local_filesets.push(fsc.len());
- }
- fsc.add_file_set(file_set_roots)
- }
-
- let fsc = fsc.build();
- res.source_root_config = SourceRootConfig { fsc, local_filesets };
-
- res
- }
-}
-
-#[derive(Default, Debug)]
-pub(crate) struct SourceRootConfig {
- pub(crate) fsc: FileSetConfig,
- pub(crate) local_filesets: Vec<usize>,
-}
-
-impl SourceRootConfig {
- pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
- let _p = profile::span("SourceRootConfig::partition");
- self.fsc
- .partition(vfs)
- .into_iter()
- .enumerate()
- .map(|(idx, file_set)| {
- let is_local = self.local_filesets.contains(&idx);
- if is_local {
- SourceRoot::new_local(file_set)
- } else {
- SourceRoot::new_library(file_set)
- }
- })
- .collect()
- }
-}
-
-/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
-/// with an identity dummy expander.
-pub(crate) fn load_proc_macro(
- server: &ProcMacroServer,
- path: &AbsPath,
- dummy_replace: &[Box<str>],
-) -> ProcMacroLoadResult {
- let res: Result<Vec<_>, String> = (|| {
- let dylib = MacroDylib::new(path.to_path_buf());
- let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
- if vec.is_empty() {
- return Err("proc macro library returned no proc macros".to_string());
- }
- Ok(vec
- .into_iter()
- .map(|expander| expander_to_proc_macro(expander, dummy_replace))
- .collect())
- })();
- return match res {
- Ok(proc_macros) => {
- tracing::info!(
- "Loaded proc-macros for {}: {:?}",
- path.display(),
- proc_macros.iter().map(|it| it.name.clone()).collect::<Vec<_>>()
- );
- Ok(proc_macros)
- }
- Err(e) => {
- tracing::warn!("proc-macro loading for {} failed: {e}", path.display());
- Err(e)
- }
- };
-
- fn expander_to_proc_macro(
- expander: proc_macro_api::ProcMacro,
- dummy_replace: &[Box<str>],
- ) -> ProcMacro {
- let name = SmolStr::from(expander.name());
- let kind = match expander.kind() {
- proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
- proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
- proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
- };
- let expander: sync::Arc<dyn ProcMacroExpander> =
- if dummy_replace.iter().any(|replace| &**replace == name) {
- match kind {
- ProcMacroKind::Attr => sync::Arc::new(IdentityExpander),
- _ => sync::Arc::new(EmptyExpander),
- }
- } else {
- sync::Arc::new(Expander(expander))
- };
- ProcMacro { name, kind, expander }
- }
-
- #[derive(Debug)]
- struct Expander(proc_macro_api::ProcMacro);
-
- impl ProcMacroExpander for Expander {
- fn expand(
- &self,
- subtree: &tt::Subtree,
- attrs: Option<&tt::Subtree>,
- env: &Env,
- ) -> Result<tt::Subtree, ProcMacroExpansionError> {
- let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
- match self.0.expand(subtree, attrs, env) {
- Ok(Ok(subtree)) => Ok(subtree),
- Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
- Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
- }
- }
- }
-
- /// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user.
- #[derive(Debug)]
- struct IdentityExpander;
-
- impl ProcMacroExpander for IdentityExpander {
- fn expand(
- &self,
- subtree: &tt::Subtree,
- _: Option<&tt::Subtree>,
- _: &Env,
- ) -> Result<tt::Subtree, ProcMacroExpansionError> {
- Ok(subtree.clone())
- }
- }
-
- /// Empty expander, used for proc-macros that are deliberately ignored by the user.
- #[derive(Debug)]
- struct EmptyExpander;
-
- impl ProcMacroExpander for EmptyExpander {
- fn expand(
- &self,
- _: &tt::Subtree,
- _: Option<&tt::Subtree>,
- _: &Env,
- ) -> Result<tt::Subtree, ProcMacroExpansionError> {
- Ok(tt::Subtree::empty())
- }
- }
-}
-
pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs
index d4bb20c8f..1fe02fc7e 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs
@@ -78,6 +78,7 @@ define_semantic_token_types![
(DERIVE_HELPER, "deriveHelper") => DECORATOR,
(DOT, "dot"),
(ESCAPE_SEQUENCE, "escapeSequence") => STRING,
+ (INVALID_ESCAPE_SEQUENCE, "invalidEscapeSequence") => STRING,
(FORMAT_SPECIFIER, "formatSpecifier") => STRING,
(GENERIC, "generic") => TYPE_PARAMETER,
(LABEL, "label"),
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
index 648bc995a..7b32180e3 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
@@ -10,8 +10,8 @@ use ide::{
CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit,
Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, InlayHint,
InlayHintLabel, InlayHintLabelPart, InlayKind, Markup, NavigationTarget, ReferenceCategory,
- RenameError, Runnable, Severity, SignatureHelp, SourceChange, StructureNodeKind, SymbolKind,
- TextEdit, TextRange, TextSize,
+ RenameError, Runnable, Severity, SignatureHelp, SnippetEdit, SourceChange, StructureNodeKind,
+ SymbolKind, TextEdit, TextRange, TextSize,
};
use itertools::Itertools;
use serde_json::to_value;
@@ -22,7 +22,7 @@ use crate::{
config::{CallInfoConfig, Config},
global_state::GlobalStateSnapshot,
line_index::{LineEndings, LineIndex, PositionEncoding},
- lsp_ext,
+ lsp_ext::{self, SnippetTextEdit},
lsp_utils::invalid_params_error,
semantic_tokens::{self, standard_fallback_type},
};
@@ -94,7 +94,10 @@ pub(crate) fn document_highlight_kind(
pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSeverity {
match severity {
Severity::Error => lsp_types::DiagnosticSeverity::ERROR,
+ Severity::Warning => lsp_types::DiagnosticSeverity::WARNING,
Severity::WeakWarning => lsp_types::DiagnosticSeverity::HINT,
+ // unreachable
+ Severity::Allow => lsp_types::DiagnosticSeverity::INFORMATION,
}
}
@@ -637,6 +640,7 @@ fn semantic_token_type_and_modifiers(
HlTag::CharLiteral => semantic_tokens::CHAR,
HlTag::Comment => semantic_tokens::COMMENT,
HlTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE,
+ HlTag::InvalidEscapeSequence => semantic_tokens::INVALID_ESCAPE_SEQUENCE,
HlTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER,
HlTag::Keyword => semantic_tokens::KEYWORD,
HlTag::None => semantic_tokens::GENERIC,
@@ -881,16 +885,136 @@ fn outside_workspace_annotation_id() -> String {
String::from("OutsideWorkspace")
}
+fn merge_text_and_snippet_edits(
+ line_index: &LineIndex,
+ edit: TextEdit,
+ snippet_edit: SnippetEdit,
+) -> Vec<SnippetTextEdit> {
+ let mut edits: Vec<SnippetTextEdit> = vec![];
+ let mut snippets = snippet_edit.into_edit_ranges().into_iter().peekable();
+ let mut text_edits = edit.into_iter();
+
+ while let Some(current_indel) = text_edits.next() {
+ let new_range = {
+ let insert_len =
+ TextSize::try_from(current_indel.insert.len()).unwrap_or(TextSize::from(u32::MAX));
+ TextRange::at(current_indel.delete.start(), insert_len)
+ };
+
+ // insert any snippets before the text edit
+ for (snippet_index, snippet_range) in
+ snippets.take_while_ref(|(_, range)| range.end() < new_range.start())
+ {
+ let snippet_range = if !stdx::always!(
+ snippet_range.is_empty(),
+ "placeholder range {:?} is before current text edit range {:?}",
+ snippet_range,
+ new_range
+ ) {
+ // only possible for tabstops, so make sure it's an empty/insert range
+ TextRange::empty(snippet_range.start())
+ } else {
+ snippet_range
+ };
+
+ let range = range(&line_index, snippet_range);
+ let new_text = format!("${snippet_index}");
+
+ edits.push(SnippetTextEdit {
+ range,
+ new_text,
+ insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET),
+ annotation_id: None,
+ })
+ }
+
+ if snippets.peek().is_some_and(|(_, range)| new_range.intersect(*range).is_some()) {
+ // at least one snippet edit intersects this text edit,
+ // so gather all of the edits that intersect this text edit
+ let mut all_snippets = snippets
+ .take_while_ref(|(_, range)| new_range.intersect(*range).is_some())
+ .collect_vec();
+
+ // ensure all of the ranges are wholly contained inside of the new range
+ all_snippets.retain(|(_, range)| {
+ stdx::always!(
+ new_range.contains_range(*range),
+ "found placeholder range {:?} which wasn't fully inside of text edit's new range {:?}", range, new_range
+ )
+ });
+
+ let mut text_edit = text_edit(line_index, current_indel);
+
+ // escape out snippet text
+ stdx::replace(&mut text_edit.new_text, '\\', r"\\");
+ stdx::replace(&mut text_edit.new_text, '$', r"\$");
+
+ // ...and apply!
+ for (index, range) in all_snippets.iter().rev() {
+ let start = (range.start() - new_range.start()).into();
+ let end = (range.end() - new_range.start()).into();
+
+ if range.is_empty() {
+ text_edit.new_text.insert_str(start, &format!("${index}"));
+ } else {
+ text_edit.new_text.insert(end, '}');
+ text_edit.new_text.insert_str(start, &format!("${{{index}:"));
+ }
+ }
+
+ edits.push(SnippetTextEdit {
+ range: text_edit.range,
+ new_text: text_edit.new_text,
+ insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET),
+ annotation_id: None,
+ })
+ } else {
+ // snippet edit was beyond the current one
+ // since it wasn't consumed, it's available for the next pass
+ edits.push(snippet_text_edit(line_index, false, current_indel));
+ }
+ }
+
+ // insert any remaining tabstops
+ edits.extend(snippets.map(|(snippet_index, snippet_range)| {
+ let snippet_range = if !stdx::always!(
+ snippet_range.is_empty(),
+ "found placeholder snippet {:?} without a text edit",
+ snippet_range
+ ) {
+ TextRange::empty(snippet_range.start())
+ } else {
+ snippet_range
+ };
+
+ let range = range(&line_index, snippet_range);
+ let new_text = format!("${snippet_index}");
+
+ SnippetTextEdit {
+ range,
+ new_text,
+ insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET),
+ annotation_id: None,
+ }
+ }));
+
+ edits
+}
+
pub(crate) fn snippet_text_document_edit(
snap: &GlobalStateSnapshot,
is_snippet: bool,
file_id: FileId,
edit: TextEdit,
+ snippet_edit: Option<SnippetEdit>,
) -> Cancellable<lsp_ext::SnippetTextDocumentEdit> {
let text_document = optional_versioned_text_document_identifier(snap, file_id);
let line_index = snap.file_line_index(file_id)?;
- let mut edits: Vec<_> =
- edit.into_iter().map(|it| snippet_text_edit(&line_index, is_snippet, it)).collect();
+ let mut edits = if let Some(snippet_edit) = snippet_edit {
+ merge_text_and_snippet_edits(&line_index, edit, snippet_edit)
+ } else {
+ edit.into_iter().map(|it| snippet_text_edit(&line_index, is_snippet, it)).collect()
+ };
if snap.analysis.is_library_file(file_id)? && snap.config.change_annotation_support() {
for edit in &mut edits {
@@ -970,8 +1094,14 @@ pub(crate) fn snippet_workspace_edit(
let ops = snippet_text_document_ops(snap, op)?;
document_changes.extend_from_slice(&ops);
}
- for (file_id, edit) in source_change.source_file_edits {
- let edit = snippet_text_document_edit(snap, source_change.is_snippet, file_id, edit)?;
+ for (file_id, (edit, snippet_edit)) in source_change.source_file_edits {
+ let edit = snippet_text_document_edit(
+ snap,
+ source_change.is_snippet,
+ file_id,
+ edit,
+ snippet_edit,
+ )?;
document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit));
}
let mut workspace_edit = lsp_ext::SnippetWorkspaceEdit {
@@ -1410,7 +1540,9 @@ pub(crate) fn rename_error(err: RenameError) -> crate::LspError {
#[cfg(test)]
mod tests {
+ use expect_test::{expect, Expect};
use ide::{Analysis, FilePosition};
+ use ide_db::source_change::Snippet;
use test_utils::extract_offset;
use triomphe::Arc;
@@ -1480,6 +1612,481 @@ fn bar(_: usize) {}
assert!(!docs.contains("use crate::bar"));
}
+ fn check_rendered_snippets(edit: TextEdit, snippets: SnippetEdit, expect: Expect) {
+ let text = r#"/* place to put all ranges in */"#;
+ let line_index = LineIndex {
+ index: Arc::new(ide::LineIndex::new(text)),
+ endings: LineEndings::Unix,
+ encoding: PositionEncoding::Utf8,
+ };
+
+ let res = merge_text_and_snippet_edits(&line_index, edit, snippets);
+ expect.assert_debug_eq(&res);
+ }
+
+ #[test]
+ fn snippet_rendering_only_tabstops() {
+ let edit = TextEdit::builder().finish();
+ let snippets = SnippetEdit::new(vec![
+ Snippet::Tabstop(0.into()),
+ Snippet::Tabstop(0.into()),
+ Snippet::Tabstop(1.into()),
+ Snippet::Tabstop(1.into()),
+ ]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "$1",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "$2",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 1,
+ },
+ end: Position {
+ line: 0,
+ character: 1,
+ },
+ },
+ new_text: "$3",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 1,
+ },
+ end: Position {
+ line: 0,
+ character: 1,
+ },
+ },
+ new_text: "$0",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_only_text_edits() {
+ let mut edit = TextEdit::builder();
+ edit.insert(0.into(), "abc".to_owned());
+ edit.insert(3.into(), "def".to_owned());
+ let edit = edit.finish();
+ let snippets = SnippetEdit::new(vec![]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "abc",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 3,
+ },
+ end: Position {
+ line: 0,
+ character: 3,
+ },
+ },
+ new_text: "def",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_tabstop_after_text_edit() {
+ let mut edit = TextEdit::builder();
+ edit.insert(0.into(), "abc".to_owned());
+ let edit = edit.finish();
+ let snippets = SnippetEdit::new(vec![Snippet::Tabstop(7.into())]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "abc",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 7,
+ },
+ end: Position {
+ line: 0,
+ character: 7,
+ },
+ },
+ new_text: "$0",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_tabstops_before_text_edit() {
+ let mut edit = TextEdit::builder();
+ edit.insert(2.into(), "abc".to_owned());
+ let edit = edit.finish();
+ let snippets =
+ SnippetEdit::new(vec![Snippet::Tabstop(0.into()), Snippet::Tabstop(0.into())]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "$1",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "$0",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 2,
+ },
+ end: Position {
+ line: 0,
+ character: 2,
+ },
+ },
+ new_text: "abc",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_tabstops_between_text_edits() {
+ let mut edit = TextEdit::builder();
+ edit.insert(0.into(), "abc".to_owned());
+ edit.insert(7.into(), "abc".to_owned());
+ let edit = edit.finish();
+ let snippets =
+ SnippetEdit::new(vec![Snippet::Tabstop(4.into()), Snippet::Tabstop(4.into())]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "abc",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 4,
+ },
+ end: Position {
+ line: 0,
+ character: 4,
+ },
+ },
+ new_text: "$1",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 4,
+ },
+ end: Position {
+ line: 0,
+ character: 4,
+ },
+ },
+ new_text: "$0",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 7,
+ },
+ end: Position {
+ line: 0,
+ character: 7,
+ },
+ },
+ new_text: "abc",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_multiple_tabstops_in_text_edit() {
+ let mut edit = TextEdit::builder();
+ edit.insert(0.into(), "abcdefghijkl".to_owned());
+ let edit = edit.finish();
+ let snippets = SnippetEdit::new(vec![
+ Snippet::Tabstop(0.into()),
+ Snippet::Tabstop(5.into()),
+ Snippet::Tabstop(12.into()),
+ ]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "$1abcde$2fghijkl$0",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_multiple_placeholders_in_text_edit() {
+ let mut edit = TextEdit::builder();
+ edit.insert(0.into(), "abcdefghijkl".to_owned());
+ let edit = edit.finish();
+ let snippets = SnippetEdit::new(vec![
+ Snippet::Placeholder(TextRange::new(0.into(), 3.into())),
+ Snippet::Placeholder(TextRange::new(5.into(), 7.into())),
+ Snippet::Placeholder(TextRange::new(10.into(), 12.into())),
+ ]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "${1:abc}de${2:fg}hij${0:kl}",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_escape_snippet_bits() {
+ // only needed for snippet formats
+ let mut edit = TextEdit::builder();
+ edit.insert(0.into(), r"abc\def$".to_owned());
+ edit.insert(8.into(), r"ghi\jkl$".to_owned());
+ let edit = edit.finish();
+ let snippets =
+ SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(0.into(), 3.into()))]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "${0:abc}\\\\def\\$",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 8,
+ },
+ end: Position {
+ line: 0,
+ character: 8,
+ },
+ },
+ new_text: "ghi\\jkl$",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
// `Url` is not able to parse windows paths on unix machines.
#[test]
#[cfg(target_os = "windows")]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
index b2a8041ae..3c52ef5ef 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -13,6 +13,7 @@ use rust_analyzer::{config::Config, lsp_ext, main_loop};
use serde::Serialize;
use serde_json::{json, to_string_pretty, Value};
use test_utils::FixtureWithProjectMeta;
+use tracing_subscriber::{prelude::*, Layer};
use vfs::AbsPathBuf;
use crate::testdir::TestDir;
@@ -24,7 +25,7 @@ pub(crate) struct Project<'a> {
config: serde_json::Value,
}
-impl<'a> Project<'a> {
+impl Project<'_> {
pub(crate) fn with_fixture(fixture: &str) -> Project<'_> {
Project {
fixture,
@@ -47,17 +48,17 @@ impl<'a> Project<'a> {
}
}
- pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Project<'a> {
+ pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Self {
self.tmp_dir = Some(tmp_dir);
self
}
- pub(crate) fn root(mut self, path: &str) -> Project<'a> {
+ pub(crate) fn root(mut self, path: &str) -> Self {
self.roots.push(path.into());
self
}
- pub(crate) fn with_config(mut self, config: serde_json::Value) -> Project<'a> {
+ pub(crate) fn with_config(mut self, config: serde_json::Value) -> Self {
fn merge(dst: &mut serde_json::Value, src: serde_json::Value) {
match (dst, src) {
(Value::Object(dst), Value::Object(src)) => {
@@ -76,10 +77,11 @@ impl<'a> Project<'a> {
let tmp_dir = self.tmp_dir.unwrap_or_else(TestDir::new);
static INIT: Once = Once::new();
INIT.call_once(|| {
- tracing_subscriber::fmt()
- .with_test_writer()
- .with_env_filter(tracing_subscriber::EnvFilter::from_env("RA_LOG"))
- .init();
+ let filter: tracing_subscriber::filter::Targets =
+ std::env::var("RA_LOG").ok().and_then(|it| it.parse().ok()).unwrap_or_default();
+ let layer =
+ tracing_subscriber::fmt::Layer::new().with_test_writer().with_filter(filter);
+ tracing_subscriber::Registry::default().with(layer).init();
profile::init_from(crate::PROFILE);
});
@@ -111,6 +113,14 @@ impl<'a> Project<'a> {
relative_pattern_support: None,
},
),
+ workspace_edit: Some(lsp_types::WorkspaceEditClientCapabilities {
+ resource_operations: Some(vec![
+ lsp_types::ResourceOperationKind::Create,
+ lsp_types::ResourceOperationKind::Delete,
+ lsp_types::ResourceOperationKind::Rename,
+ ]),
+ ..Default::default()
+ }),
..Default::default()
}),
text_document: Some(lsp_types::TextDocumentClientCapabilities {
diff --git a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
index c5da6ceb4..1514c6c7d 100644
--- a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
@@ -61,7 +61,9 @@ impl CommentBlock {
let mut blocks = CommentBlock::extract_untagged(text);
blocks.retain_mut(|block| {
let first = block.contents.remove(0);
- let Some(id) = first.strip_prefix(&tag) else { return false; };
+ let Some(id) = first.strip_prefix(&tag) else {
+ return false;
+ };
if block.is_doc {
panic!("Use plain (non-doc) comments with tags like {tag}:\n {first}");
diff --git a/src/tools/rust-analyzer/crates/stdx/Cargo.toml b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
index a67f36ae9..536f000a4 100644
--- a/src/tools/rust-analyzer/crates/stdx/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false
[dependencies]
libc = "0.2.135"
-backtrace = { version = "0.3.65", optional = true }
+backtrace = { version = "0.3.67", optional = true }
always-assert = { version = "0.1.2", features = ["log"] }
jod-thread = "0.1.2"
crossbeam-channel = "0.5.5"
diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
index fb38d25ab..5ee0c4792 100644
--- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
@@ -19,7 +19,7 @@ itertools = "0.10.5"
rowan = "0.15.11"
rustc-hash = "1.1.0"
once_cell = "1.17.0"
-indexmap = "1.9.1"
+indexmap = "2.0.0"
smol_str.workspace = true
triomphe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram
index b096c9974..138ddd208 100644
--- a/src/tools/rust-analyzer/crates/syntax/rust.ungram
+++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram
@@ -72,6 +72,12 @@ TokenTree =
MacroItems =
Item*
+MacroEagerInput =
+ '(' (Expr (',' Expr)* ','?)? ')'
+| '{' (Expr (',' Expr)* ','?)? '}'
+| '[' (Expr (',' Expr)* ','?)? ']'
+
+
MacroStmts =
statements:Stmt*
Expr?
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
index b3ea6ca8d..a150d9e6c 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
@@ -213,6 +213,28 @@ pub trait AttrsOwnerEdit: ast::HasAttrs {
}
}
}
+
+ fn add_attr(&self, attr: ast::Attr) {
+ add_attr(self.syntax(), attr);
+
+ fn add_attr(node: &SyntaxNode, attr: ast::Attr) {
+ let indent = IndentLevel::from_node(node);
+ attr.reindent_to(indent);
+
+ let after_attrs_and_comments = node
+ .children_with_tokens()
+ .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
+ .map_or(Position::first_child_of(node), |it| Position::before(it));
+
+ ted::insert_all(
+ after_attrs_and_comments,
+ vec![
+ attr.syntax().clone().into(),
+ make::tokens::whitespace(&format!("\n{indent}")).into(),
+ ],
+ )
+ }
+ }
}
impl<T: ast::HasAttrs> AttrsOwnerEdit for T {}
@@ -358,6 +380,26 @@ impl Removable for ast::UseTree {
}
impl ast::UseTree {
+ /// Deletes the use tree node represented by the input. Recursively removes parents, including use nodes that become empty.
+ pub fn remove_recursive(self) {
+ let parent = self.syntax().parent();
+
+ self.remove();
+
+ if let Some(u) = parent.clone().and_then(ast::Use::cast) {
+ if u.use_tree().is_none() {
+ u.remove();
+ }
+ } else if let Some(u) = parent.and_then(ast::UseTreeList::cast) {
+ if u.use_trees().next().is_none() {
+ let parent = u.syntax().parent().and_then(ast::UseTree::cast);
+ if let Some(u) = parent {
+ u.remove_recursive();
+ }
+ }
+ }
+ }
+
pub fn get_or_create_use_tree_list(&self) -> ast::UseTreeList {
match self.use_tree_list() {
Some(it) => it,
@@ -465,6 +507,22 @@ impl Removable for ast::Use {
}
}
}
+ let prev_ws = self
+ .syntax()
+ .prev_sibling_or_token()
+ .and_then(|it| it.into_token())
+ .and_then(ast::Whitespace::cast);
+ if let Some(prev_ws) = prev_ws {
+ let ws_text = prev_ws.syntax().text();
+ let prev_newline = ws_text.rfind('\n').map(|x| x + 1).unwrap_or(0);
+ let rest = &ws_text[0..prev_newline];
+ if rest.is_empty() {
+ ted::remove(prev_ws.syntax());
+ } else {
+ ted::replace(prev_ws.syntax(), make::tokens::whitespace(rest));
+ }
+ }
+
ted::remove(self.syntax());
}
}
@@ -676,12 +734,6 @@ fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken {
}
}
-impl ast::StmtList {
- pub fn push_front(&self, statement: ast::Stmt) {
- ted::insert(Position::after(self.l_curly_token().unwrap()), statement.syntax());
- }
-}
-
impl ast::VariantList {
pub fn add_variant(&self, variant: ast::Variant) {
let (indent, position) = match self.variants().last() {
@@ -732,6 +784,27 @@ fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
Some(())
}
+pub trait HasVisibilityEdit: ast::HasVisibility {
+ fn set_visibility(&self, visibility: ast::Visibility) {
+ match self.visibility() {
+ Some(current_visibility) => {
+ ted::replace(current_visibility.syntax(), visibility.syntax())
+ }
+ None => {
+ let vis_before = self
+ .syntax()
+ .children_with_tokens()
+ .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
+ .unwrap_or_else(|| self.syntax().first_child_or_token().unwrap());
+
+ ted::insert(ted::Position::before(vis_before), visibility.syntax());
+ }
+ }
+ }
+}
+
+impl<T: ast::HasVisibility> HasVisibilityEdit for T {}
+
pub trait Indent: AstNode + Clone + Sized {
fn indent_level(&self) -> IndentLevel {
IndentLevel::from_node(self.syntax())
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
index e520801ea..0b27faa53 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -198,6 +198,20 @@ impl ast::HasModuleItem for MacroItems {}
impl MacroItems {}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroEagerInput {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroEagerInput {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn exprs(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroStmts {
pub(crate) syntax: SyntaxNode,
}
@@ -1922,6 +1936,17 @@ impl AstNode for MacroItems {
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
+impl AstNode for MacroEagerInput {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_EAGER_INPUT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
impl AstNode for MacroStmts {
fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_STMTS }
fn cast(syntax: SyntaxNode) -> Option<Self> {
@@ -4360,6 +4385,11 @@ impl std::fmt::Display for MacroItems {
std::fmt::Display::fmt(self.syntax(), f)
}
}
+impl std::fmt::Display for MacroEagerInput {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
impl std::fmt::Display for MacroStmts {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
index 3c2b7e56b..4c6db0ef0 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
@@ -10,6 +10,8 @@
//! `parse(format!())` we use internally is an implementation detail -- long
//! term, it will be replaced with direct tree manipulation.
use itertools::Itertools;
+use parser::T;
+use rowan::NodeOrToken;
use stdx::{format_to, never};
use crate::{ast, utils::is_raw_identifier, AstNode, SourceFile, SyntaxKind, SyntaxToken};
@@ -447,6 +449,21 @@ pub fn block_expr(
ast_from_text(&format!("fn f() {buf}"))
}
+pub fn async_move_block_expr(
+ stmts: impl IntoIterator<Item = ast::Stmt>,
+ tail_expr: Option<ast::Expr>,
+) -> ast::BlockExpr {
+ let mut buf = "async move {\n".to_string();
+ for stmt in stmts.into_iter() {
+ format_to!(buf, " {stmt}\n");
+ }
+ if let Some(tail_expr) = tail_expr {
+ format_to!(buf, " {tail_expr}\n");
+ }
+ buf += "}";
+ ast_from_text(&format!("const _: () = {buf};"))
+}
+
pub fn tail_only_block_expr(tail_expr: ast::Expr) -> ast::BlockExpr {
ast_from_text(&format!("fn f() {{ {tail_expr} }}"))
}
@@ -848,6 +865,36 @@ pub fn param_list(
ast_from_text(&list)
}
+pub fn trait_(
+ is_unsafe: bool,
+ ident: &str,
+ gen_params: Option<ast::GenericParamList>,
+ where_clause: Option<ast::WhereClause>,
+ assoc_items: ast::AssocItemList,
+) -> ast::Trait {
+ let mut text = String::new();
+
+ if is_unsafe {
+ format_to!(text, "unsafe ");
+ }
+
+ format_to!(text, "trait {ident}");
+
+ if let Some(gen_params) = gen_params {
+ format_to!(text, "{} ", gen_params.to_string());
+ } else {
+ text.push(' ');
+ }
+
+ if let Some(where_clause) = where_clause {
+ format_to!(text, "{} ", where_clause.to_string());
+ }
+
+ format_to!(text, "{}", assoc_items.to_string());
+
+ ast_from_text(&text)
+}
+
pub fn type_bound(bound: &str) -> ast::TypeBound {
ast_from_text(&format!("fn f<T: {bound}>() {{ }}"))
}
@@ -985,6 +1032,41 @@ pub fn struct_(
ast_from_text(&format!("{visibility}struct {strukt_name}{type_params}{field_list}{semicolon}",))
}
+pub fn attr_outer(meta: ast::Meta) -> ast::Attr {
+ ast_from_text(&format!("#[{meta}]"))
+}
+
+pub fn attr_inner(meta: ast::Meta) -> ast::Attr {
+ ast_from_text(&format!("#![{meta}]"))
+}
+
+pub fn meta_expr(path: ast::Path, expr: ast::Expr) -> ast::Meta {
+ ast_from_text(&format!("#[{path} = {expr}]"))
+}
+
+pub fn meta_token_tree(path: ast::Path, tt: ast::TokenTree) -> ast::Meta {
+ ast_from_text(&format!("#[{path}{tt}]"))
+}
+
+pub fn meta_path(path: ast::Path) -> ast::Meta {
+ ast_from_text(&format!("#[{path}]"))
+}
+
+pub fn token_tree(
+ delimiter: SyntaxKind,
+ tt: Vec<NodeOrToken<ast::TokenTree, SyntaxToken>>,
+) -> ast::TokenTree {
+ let (l_delimiter, r_delimiter) = match delimiter {
+ T!['('] => ('(', ')'),
+ T!['['] => ('[', ']'),
+ T!['{'] => ('{', '}'),
+ _ => panic!("invalid delimiter `{delimiter:?}`"),
+ };
+ let tt = tt.into_iter().join("");
+
+ ast_from_text(&format!("tt!{l_delimiter}{tt}{r_delimiter}"))
+}
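+
+// A minimal usage sketch (illustrative only, assuming the pre-existing
+// `path_from_text` constructor): the helpers above compose, e.g. to build the
+// outer attribute `#[inline]` from a path-only meta:
+//
+//     let attr = attr_outer(meta_path(path_from_text("inline")));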
+
#[track_caller]
fn ast_from_text<N: AstNode>(text: &str) -> N {
let parse = SourceFile::parse(text);
@@ -1022,6 +1104,17 @@ pub mod tokens {
)
});
+ pub fn semicolon() -> SyntaxToken {
+ SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == SEMICOLON)
+ .unwrap()
+ }
+
pub fn single_space() -> SyntaxToken {
SOURCE_FILE
.tree()
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
index 090eb89f4..87fd51d70 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
@@ -2,7 +2,9 @@
use std::borrow::Cow;
-use rustc_lexer::unescape::{unescape_byte, unescape_char, unescape_literal, Mode};
+use rustc_lexer::unescape::{
+ unescape_byte, unescape_c_string, unescape_char, unescape_literal, CStrUnit, Mode,
+};
use crate::{
ast::{self, AstToken},
@@ -146,6 +148,7 @@ impl QuoteOffsets {
pub trait IsString: AstToken {
const RAW_PREFIX: &'static str;
+ const MODE: Mode;
fn is_raw(&self) -> bool {
self.text().starts_with(Self::RAW_PREFIX)
}
@@ -181,7 +184,7 @@ pub trait IsString: AstToken {
let text = &self.text()[text_range_no_quotes - start];
let offset = text_range_no_quotes.start() - start;
- unescape_literal(text, Mode::Str, &mut |range, unescaped_char| {
+ unescape_literal(text, Self::MODE, &mut |range, unescaped_char| {
let text_range =
TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
cb(text_range + offset, unescaped_char);
@@ -196,6 +199,7 @@ pub trait IsString: AstToken {
impl IsString for ast::String {
const RAW_PREFIX: &'static str = "r";
+ const MODE: Mode = Mode::Str;
}
impl ast::String {
@@ -213,7 +217,7 @@ impl ast::String {
let mut buf = String::new();
let mut prev_end = 0;
let mut has_error = false;
- unescape_literal(text, Mode::Str, &mut |char_range, unescaped_char| match (
+ unescape_literal(text, Self::MODE, &mut |char_range, unescaped_char| match (
unescaped_char,
buf.capacity() == 0,
) {
@@ -239,6 +243,7 @@ impl ast::String {
impl IsString for ast::ByteString {
const RAW_PREFIX: &'static str = "br";
+ const MODE: Mode = Mode::ByteStr;
}
impl ast::ByteString {
@@ -256,7 +261,7 @@ impl ast::ByteString {
let mut buf: Vec<u8> = Vec::new();
let mut prev_end = 0;
let mut has_error = false;
- unescape_literal(text, Mode::ByteStr, &mut |char_range, unescaped_char| match (
+ unescape_literal(text, Self::MODE, &mut |char_range, unescaped_char| match (
unescaped_char,
buf.capacity() == 0,
) {
@@ -282,42 +287,70 @@ impl ast::ByteString {
impl IsString for ast::CString {
const RAW_PREFIX: &'static str = "cr";
+ const MODE: Mode = Mode::CStr;
+
+ fn escaped_char_ranges(
+ &self,
+ cb: &mut dyn FnMut(TextRange, Result<char, rustc_lexer::unescape::EscapeError>),
+ ) {
+ let text_range_no_quotes = match self.text_range_between_quotes() {
+ Some(it) => it,
+ None => return,
+ };
+
+ let start = self.syntax().text_range().start();
+ let text = &self.text()[text_range_no_quotes - start];
+ let offset = text_range_no_quotes.start() - start;
+
+ unescape_c_string(text, Self::MODE, &mut |range, unescaped_char| {
+ let text_range =
+ TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
+ // XXX: This method should only be used for highlighting ranges. The unescaped
+ // char/byte is not used. For simplicity, we return an arbitrary placeholder char.
+ cb(text_range + offset, unescaped_char.map(|_| ' '));
+ });
+ }
}
impl ast::CString {
- pub fn value(&self) -> Option<Cow<'_, str>> {
+ pub fn value(&self) -> Option<Cow<'_, [u8]>> {
if self.is_raw() {
let text = self.text();
let text =
&text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
- return Some(Cow::Borrowed(text));
+ return Some(Cow::Borrowed(text.as_bytes()));
}
let text = self.text();
let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
- let mut buf = String::new();
+ let mut buf = Vec::new();
let mut prev_end = 0;
let mut has_error = false;
- unescape_literal(text, Mode::Str, &mut |char_range, unescaped_char| match (
- unescaped_char,
+ let mut char_buf = [0u8; 4];
+ let mut extend_unit = |buf: &mut Vec<u8>, unit: CStrUnit| match unit {
+ CStrUnit::Byte(b) => buf.push(b),
+ CStrUnit::Char(c) => buf.extend(c.encode_utf8(&mut char_buf).as_bytes()),
+ };
+ unescape_c_string(text, Self::MODE, &mut |char_range, unescaped| match (
+ unescaped,
buf.capacity() == 0,
) {
- (Ok(c), false) => buf.push(c),
+ (Ok(u), false) => extend_unit(&mut buf, u),
(Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
prev_end = char_range.end
}
- (Ok(c), true) => {
+ (Ok(u), true) => {
buf.reserve_exact(text.len());
- buf.push_str(&text[..prev_end]);
- buf.push(c);
+ buf.extend(text[..prev_end].as_bytes());
+ extend_unit(&mut buf, u);
}
(Err(_), _) => has_error = true,
});
match (has_error, buf.capacity() == 0) {
(true, _) => None,
- (false, true) => Some(Cow::Borrowed(text)),
+ (false, true) => Some(Cow::Borrowed(text.as_bytes())),
(false, false) => Some(Cow::Owned(buf)),
}
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
index efbf87966..4cd668a0c 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
@@ -171,6 +171,109 @@ impl SourceFile {
}
}
+impl ast::TokenTree {
+ pub fn reparse_as_comma_separated_expr(self) -> Parse<ast::MacroEagerInput> {
+ let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
+
+ let mut parser_input = parser::Input::default();
+ let mut was_joint = false;
+ for t in tokens {
+ let kind = t.kind();
+ if kind.is_trivia() {
+ was_joint = false
+ } else {
+ if kind == SyntaxKind::IDENT {
+ let token_text = t.text();
+ let contextual_kw = SyntaxKind::from_contextual_keyword(token_text)
+ .unwrap_or(SyntaxKind::IDENT);
+ parser_input.push_ident(contextual_kw);
+ } else {
+ if was_joint {
+ parser_input.was_joint();
+ }
+ parser_input.push(kind);
+ // Tag the token as joint if it is a float with a fractional part;
+ // we use this jointness to inform the parser about what token split
+ // event to emit when we encounter a float literal in a field access.
+ if kind == SyntaxKind::FLOAT_NUMBER && !t.text().ends_with('.') {
+ parser_input.was_joint();
+ }
+ }
+ was_joint = true;
+ }
+ }
+
+ let parser_output = parser::TopEntryPoint::MacroEagerInput.parse(&parser_input);
+
+ let mut tokens =
+ self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
+ let mut text = String::new();
+ let mut pos = TextSize::from(0);
+ let mut builder = SyntaxTreeBuilder::default();
+ for event in parser_output.iter() {
+ match event {
+ parser::Step::Token { kind, n_input_tokens } => {
+ let mut token = tokens.next().unwrap();
+ while token.kind().is_trivia() {
+ let text = token.text();
+ pos += TextSize::from(text.len() as u32);
+ builder.token(token.kind(), text);
+
+ token = tokens.next().unwrap();
+ }
+ text.push_str(token.text());
+ for _ in 1..n_input_tokens {
+ let token = tokens.next().unwrap();
+ text.push_str(token.text());
+ }
+
+ pos += TextSize::from(text.len() as u32);
+ builder.token(kind, &text);
+ text.clear();
+ }
+ parser::Step::FloatSplit { ends_in_dot: has_pseudo_dot } => {
+ let token = tokens.next().unwrap();
+ let text = token.text();
+
+ match text.split_once('.') {
+ Some((left, right)) => {
+ assert!(!left.is_empty());
+ builder.start_node(SyntaxKind::NAME_REF);
+ builder.token(SyntaxKind::INT_NUMBER, left);
+ builder.finish_node();
+
+ // Here we move the exit up; the original exit has been deleted in the process.
+ builder.finish_node();
+
+ builder.token(SyntaxKind::DOT, ".");
+
+ if has_pseudo_dot {
+ assert!(right.is_empty(), "{left}.{right}");
+ } else {
+ builder.start_node(SyntaxKind::NAME_REF);
+ builder.token(SyntaxKind::INT_NUMBER, right);
+ builder.finish_node();
+
+ // The parser creates an unbalanced start node, so we are required to close it here.
+ builder.finish_node();
+ }
+ }
+ None => unreachable!(),
+ }
+ pos += TextSize::from(text.len() as u32);
+ }
+ parser::Step::Enter { kind } => builder.start_node(kind),
+ parser::Step::Exit => builder.finish_node(),
+ parser::Step::Error { msg } => builder.error(msg.to_owned(), pos),
+ }
+ }
+
+ let (green, errors) = builder.finish_raw();
+
+ Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ }
+}
+
/// Matches a `SyntaxNode` against an `ast` type.
///
/// # Example:
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs b/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
index c5783b91a..e4db33f1c 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
@@ -216,6 +216,7 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc {
// macro related
"MACRO_ITEMS",
"MACRO_STMTS",
+ "MACRO_EAGER_INPUT",
],
};
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
index 602baed37..75e7a3fec 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
@@ -47,13 +47,10 @@
//! ```
//!
//! Metadata allows specifying all settings and variables
-//! that are available in a real rust project:
-//! - crate names via `crate:cratename`
-//! - dependencies via `deps:dep1,dep2`
-//! - configuration settings via `cfg:dbg=false,opt_level=2`
-//! - environment variables via `env:PATH=/bin,RUST_LOG=debug`
+//! that are available in a real rust project. See [`Fixture`]
+//! for the syntax.
//!
-//! Example using all available metadata:
+//! Example using some available metadata:
//! ```
//! "
//! //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo
@@ -68,17 +65,74 @@ use stdx::trim_indent;
#[derive(Debug, Eq, PartialEq)]
pub struct Fixture {
+ /// Specifies the path for this file. It must start with "/".
pub path: String,
- pub text: String,
+ /// Defines a new crate and makes this file its root module.
+ ///
+ /// Version and repository URL of the crate can optionally be specified; if
+ /// either one is specified, the other must also be specified.
+ ///
+ /// Syntax:
+ /// - `crate:my_awesome_lib`
+ /// - `crate:my_awesome_lib@0.0.1,https://example.com/repo.git`
pub krate: Option<String>,
+ /// Specifies dependencies of this crate. This must be used with `crate` meta.
+ ///
+ /// Syntax: `deps:hir-def,ide-assists`
pub deps: Vec<String>,
+ /// Limits crates in the extern prelude. The set of crate names must be a
+ /// subset of `deps`. This must be used with `crate` meta.
+ ///
+ /// If this is not specified, all the dependencies will be in the extern prelude.
+ ///
+ /// Syntax: `extern-prelude:hir-def,ide-assists`
pub extern_prelude: Option<Vec<String>>,
- pub cfg_atoms: Vec<String>,
- pub cfg_key_values: Vec<(String, String)>,
+ /// Specifies configuration options to be enabled. Options may have associated
+ /// values.
+ ///
+ /// Syntax: `cfg:test,dbg=false,opt_level=2`
+ pub cfgs: Vec<(String, Option<String>)>,
+ /// Specifies the edition of this crate. This must be used with `crate` meta. If
+ /// this is not specified, [`base_db::input::Edition::CURRENT`] will be used.
+ ///
+ /// Syntax: `edition:2021`
pub edition: Option<String>,
+ /// Specifies environment variables.
+ ///
+ /// Syntax: `env:PATH=/bin,RUST_LOG=debug`
pub env: FxHashMap<String, String>,
+ /// Introduces a new [source root](base_db::input::SourceRoot). This file **and
+ /// the following files** will belong to the new source root. This must be used
+ /// with `crate` meta.
+ ///
+ /// Use this if you want to test something that uses `SourceRoot::is_library()`
+ /// to check editability.
+ ///
+ /// Note that files before the first fixture with `new_source_root` meta will
+ /// belong to an implicitly defined local source root.
+ ///
+ /// Syntax:
+ /// - `new_source_root:library`
+ /// - `new_source_root:local`
pub introduce_new_source_root: Option<String>,
+ /// Explicitly declares this crate as a library outside the current workspace. This
+ /// must be used with `crate` meta.
+ ///
+ /// This is implied if this file belongs to a library source root.
+ ///
+ /// Use this if you want to test something that checks if a crate is a workspace
+ /// member via [`CrateOrigin`](base_db::input::CrateOrigin).
+ ///
+ /// Syntax: `library`
+ pub library: bool,
+ /// Specifies LLVM data layout to be used.
+ ///
+ /// You probably don't want to manually specify this. See the LLVM manual for the
+ /// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout
pub target_data_layout: Option<String>,
+ /// Actual file contents. All meta comments are stripped.
+ pub text: String,
}
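+
+// Illustrative example of the mapping above (a sketch, not exhaustive): the meta line
+// `//- /foo/lib.rs crate:foo deps:bar edition:2021 cfg:test,opt_level=2 library`
+// parses into a `Fixture` with `path == "/foo/lib.rs"`, `krate == Some("foo")`,
+// `deps == ["bar"]`, `edition == Some("2021")`,
+// `cfgs == [("test", None), ("opt_level", Some("2"))]`, and `library == true`.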
pub struct MiniCore {
@@ -178,23 +232,28 @@ impl FixtureWithProjectMeta {
fn parse_meta_line(meta: &str) -> Fixture {
assert!(meta.starts_with("//-"));
let meta = meta["//-".len()..].trim();
- let components = meta.split_ascii_whitespace().collect::<Vec<_>>();
+ let mut components = meta.split_ascii_whitespace();
- let path = components[0].to_string();
+ let path = components.next().expect("fixture meta must start with a path").to_string();
assert!(path.starts_with('/'), "fixture path does not start with `/`: {path:?}");
let mut krate = None;
let mut deps = Vec::new();
let mut extern_prelude = None;
let mut edition = None;
- let mut cfg_atoms = Vec::new();
- let mut cfg_key_values = Vec::new();
+ let mut cfgs = Vec::new();
let mut env = FxHashMap::default();
let mut introduce_new_source_root = None;
+ let mut library = false;
let mut target_data_layout = Some(
"e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_string(),
);
- for component in components[1..].iter() {
+ for component in components {
+ if component == "library" {
+ library = true;
+ continue;
+ }
+
let (key, value) =
component.split_once(':').unwrap_or_else(|| panic!("invalid meta line: {meta:?}"));
match key {
@@ -212,8 +271,8 @@ impl FixtureWithProjectMeta {
"cfg" => {
for entry in value.split(',') {
match entry.split_once('=') {
- Some((k, v)) => cfg_key_values.push((k.to_string(), v.to_string())),
- None => cfg_atoms.push(entry.to_string()),
+ Some((k, v)) => cfgs.push((k.to_string(), Some(v.to_string()))),
+ None => cfgs.push((entry.to_string(), None)),
}
}
}
@@ -243,11 +302,11 @@ impl FixtureWithProjectMeta {
krate,
deps,
extern_prelude,
- cfg_atoms,
- cfg_key_values,
+ cfgs,
edition,
env,
introduce_new_source_root,
+ library,
target_data_layout,
}
}
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
index 266bc2391..c765f4244 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
@@ -20,6 +20,7 @@
//! deref_mut: deref
//! deref: sized
//! derive:
+//! discriminant:
//! drop:
//! eq: sized
//! error: fmt
@@ -36,6 +37,7 @@
//! iterator: option
//! iterators: iterator, fn
//! manually_drop: drop
+//! non_null:
//! non_zero:
//! option: panic
//! ord: eq, option
@@ -129,6 +131,14 @@ pub mod marker {
#[lang = "phantom_data"]
pub struct PhantomData<T: ?Sized>;
// endregion:phantom_data
+
+ // region:discriminant
+ #[lang = "discriminant_kind"]
+ pub trait DiscriminantKind {
+ #[lang = "discriminant_type"]
+ type Discriminant;
+ }
+ // endregion:discriminant
}
// region:default
@@ -354,6 +364,11 @@ pub mod mem {
pub fn size_of<T>() -> usize;
}
// endregion:size_of
+
+ // region:discriminant
+ use crate::marker::DiscriminantKind;
+ pub struct Discriminant<T>(<T as DiscriminantKind>::Discriminant);
+ // endregion:discriminant
}
pub mod ptr {
@@ -377,6 +392,19 @@ pub mod ptr {
type Metadata;
}
// endregion:pointee
+ // region:non_null
+ #[rustc_layout_scalar_valid_range_start(1)]
+ #[rustc_nonnull_optimization_guaranteed]
+ pub struct NonNull<T: ?Sized> {
+ pointer: *const T,
+ }
+ // region:coerce_unsized
+ impl<T: ?Sized, U: ?Sized> crate::ops::CoerceUnsized<NonNull<U>> for NonNull<T> where
+ T: crate::marker::Unsize<U>
+ {
+ }
+ // endregion:coerce_unsized
+ // endregion:non_null
}
pub mod ops {
@@ -1287,6 +1315,11 @@ mod macros {
pub macro derive($item:item) {
/* compiler built-in */
}
+
+ #[rustc_builtin_macro]
+ pub macro derive_const($item:item) {
+ /* compiler built-in */
+ }
}
// endregion:derive
@@ -1354,24 +1387,24 @@ pub mod error {
pub mod prelude {
pub mod v1 {
pub use crate::{
- clone::Clone, // :clone
- cmp::{Eq, PartialEq}, // :eq
- cmp::{Ord, PartialOrd}, // :ord
- convert::AsRef, // :as_ref
- convert::{From, Into}, // :from
- default::Default, // :default
- iter::{IntoIterator, Iterator}, // :iterator
- macros::builtin::derive, // :derive
- marker::Copy, // :copy
- marker::Send, // :send
- marker::Sized, // :sized
- marker::Sync, // :sync
- mem::drop, // :drop
- ops::Drop, // :drop
- ops::{Fn, FnMut, FnOnce}, // :fn
- option::Option::{self, None, Some}, // :option
- panic, // :panic
- result::Result::{self, Err, Ok}, // :result
+ clone::Clone, // :clone
+ cmp::{Eq, PartialEq}, // :eq
+ cmp::{Ord, PartialOrd}, // :ord
+ convert::AsRef, // :as_ref
+ convert::{From, Into}, // :from
+ default::Default, // :default
+ iter::{IntoIterator, Iterator}, // :iterator
+ macros::builtin::{derive, derive_const}, // :derive
+ marker::Copy, // :copy
+ marker::Send, // :send
+ marker::Sized, // :sized
+ marker::Sync, // :sync
+ mem::drop, // :drop
+ ops::Drop, // :drop
+ ops::{Fn, FnMut, FnOnce}, // :fn
+ option::Option::{self, None, Some}, // :option
+ panic, // :panic
+ result::Result::{self, Err, Ok}, // :result
};
}
diff --git a/src/tools/rust-analyzer/crates/tt/src/buffer.rs b/src/tools/rust-analyzer/crates/tt/src/buffer.rs
index 0615a3763..cade2e9f6 100644
--- a/src/tools/rust-analyzer/crates/tt/src/buffer.rs
+++ b/src/tools/rust-analyzer/crates/tt/src/buffer.rs
@@ -134,7 +134,7 @@ pub enum TokenTreeRef<'a, Span> {
Leaf(&'a Leaf<Span>, &'a TokenTree<Span>),
}
-impl<'a, Span: Clone> TokenTreeRef<'a, Span> {
+impl<Span: Clone> TokenTreeRef<'_, Span> {
pub fn cloned(&self) -> TokenTree<Span> {
match self {
TokenTreeRef::Subtree(subtree, tt) => match tt {
@@ -153,13 +153,13 @@ pub struct Cursor<'a, Span> {
ptr: EntryPtr,
}
-impl<'a, Span> PartialEq for Cursor<'a, Span> {
+impl<Span> PartialEq for Cursor<'_, Span> {
fn eq(&self, other: &Cursor<'_, Span>) -> bool {
self.ptr == other.ptr && std::ptr::eq(self.buffer, other.buffer)
}
}
-impl<'a, Span> Eq for Cursor<'a, Span> {}
+impl<Span> Eq for Cursor<'_, Span> {}
impl<'a, Span> Cursor<'a, Span> {
/// Check whether it is eof
diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs
index c2ebf0374..b5a72bec0 100644
--- a/src/tools/rust-analyzer/crates/tt/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs
@@ -65,7 +65,22 @@ pub mod token_id {
}
impl TokenTree {
pub const fn empty() -> Self {
- Self::Subtree(Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] })
+ Self::Subtree(Subtree::empty())
+ }
+ }
+
+ impl Subtree {
+ pub fn visit_ids(&mut self, f: &mut impl FnMut(TokenId) -> TokenId) {
+ self.delimiter.open = f(self.delimiter.open);
+ self.delimiter.close = f(self.delimiter.close);
+ self.token_trees.iter_mut().for_each(|tt| match tt {
+ crate::TokenTree::Leaf(leaf) => match leaf {
+ crate::Leaf::Literal(it) => it.span = f(it.span),
+ crate::Leaf::Punct(it) => it.span = f(it.span),
+ crate::Leaf::Ident(it) => it.span = f(it.span),
+ },
+ crate::TokenTree::Subtree(s) => s.visit_ids(f),
+ })
}
}
}
@@ -107,7 +122,6 @@ impl_from!(Literal<Span>, Punct<Span>, Ident<Span> for Leaf);
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Subtree<Span> {
- // FIXME, this should not be Option
pub delimiter: Delimiter<Span>,
pub token_trees: Vec<TokenTree<Span>>,
}
diff --git a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
index 5d61a2272..95c514251 100644
--- a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
@@ -15,7 +15,8 @@ doctest = false
tracing = "0.1.35"
walkdir = "2.3.2"
crossbeam-channel = "0.5.5"
-notify = "5.0"
+# Pinned to 5.1.0, as any higher version pulls in a new windows-sys dupe
+notify = "=5.1.0"
stdx.workspace = true
vfs.workspace = true
diff --git a/src/tools/rust-analyzer/crates/vfs/Cargo.toml b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
index 3ae3dc83c..c35785cf9 100644
--- a/src/tools/rust-analyzer/crates/vfs/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
@@ -14,7 +14,7 @@ doctest = false
[dependencies]
rustc-hash = "1.1.0"
fst = "0.4.7"
-indexmap = "1.9.1"
+indexmap = "2.0.0"
nohash-hasher.workspace = true
paths.workspace = true
diff --git a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
index d327f2edf..52ada32bd 100644
--- a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
+++ b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
@@ -292,8 +292,8 @@ impl From<AbsPathBuf> for VfsPath {
impl fmt::Display for VfsPath {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.0 {
- VfsPathRepr::PathBuf(it) => fmt::Display::fmt(&it.display(), f),
- VfsPathRepr::VirtualPath(VirtualPath(it)) => fmt::Display::fmt(it, f),
+ VfsPathRepr::PathBuf(it) => it.fmt(f),
+ VfsPathRepr::VirtualPath(VirtualPath(it)) => it.fmt(f),
}
}
}
@@ -307,8 +307,8 @@ impl fmt::Debug for VfsPath {
impl fmt::Debug for VfsPathRepr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self {
- VfsPathRepr::PathBuf(it) => fmt::Debug::fmt(&it.display(), f),
- VfsPathRepr::VirtualPath(VirtualPath(it)) => fmt::Debug::fmt(&it, f),
+ VfsPathRepr::PathBuf(it) => it.fmt(f),
+ VfsPathRepr::VirtualPath(VirtualPath(it)) => it.fmt(f),
}
}
}
diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
index bc58aa722..024acb877 100644
--- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
+++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
<!---
-lsp_ext.rs hash: 2d60bbffe70ae198
+lsp_ext.rs hash: 149a5be3c5e469d1
If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:
@@ -886,3 +886,48 @@ export interface FetchDependencyListResult {
}
```
Returns all crates from this workspace, so it can be used to create a viewTree to help navigate the dependency tree.
+
+## View Recursive Memory Layout
+
+**Method:** `rust-analyzer/viewRecursiveMemoryLayout`
+
+**Request:** `TextDocumentPositionParams`
+
+**Response:**
+
+```typescript
+export interface RecursiveMemoryLayoutNode {
+ /// Name of the item, or `[ROOT]` for the root, or `.n` for tuple fields
+ item_name: string;
+ /// Full name of the type (type aliases are ignored)
+ typename: string;
+ /// Size of the type in bytes
+ size: number;
+ /// Alignment of the type in bytes
+ alignment: number;
+ /// Offset of the type relative to its parent (or 0 if it's the root)
+ offset: number;
+ /// Index of the node's parent (or -1 if it's the root)
+ parent_idx: number;
+ /// Index of the node's first child (or -1 if it does not have children)
+ children_start: number;
+ /// Number of child nodes (unspecified if it does not have children)
+ children_len: number;
+};
+
+export interface RecursiveMemoryLayout {
+ nodes: RecursiveMemoryLayoutNode[];
+};
+```
+
+Returns a vector of nodes representing items in the datatype as a tree; `RecursiveMemoryLayout::nodes[0]` is the root node.
+
+If `RecursiveMemoryLayout::nodes::length == 0`, we could not find a suitable type.
+
+Generic types do not give anything because they are incomplete. Fully specified generic types do not give anything if they are selected directly, but they do work when they appear as a child of another type; [this is consistent with other behavior](https://github.com/rust-lang/rust-analyzer/issues/15010).
+
+### Unresolved questions:
+
+- How should enums/unions be represented? Currently they do not produce any children because they have multiple distinct sets of children.
+- Should niches be represented? Currently they are not reported.
+- A visual representation of the memory layout is not specified; see the provided implementation for an example. However, it may not translate well to terminal-based editors or other such environments.
diff --git a/src/tools/rust-analyzer/docs/dev/style.md b/src/tools/rust-analyzer/docs/dev/style.md
index d2a03fba4..786127639 100644
--- a/src/tools/rust-analyzer/docs/dev/style.md
+++ b/src/tools/rust-analyzer/docs/dev/style.md
@@ -869,6 +869,19 @@ type -> ty
**Rationale:** consistency.
+## Error Handling Trivia
+
+Use `anyhow::Result` rather than just `Result`.
+
+**Rationale:** makes it immediately clear which `Result` type is meant.
+
+Use `anyhow::format_err!` rather than `anyhow::anyhow!`.
+
+**Rationale:** consistent, boring, avoids stuttering.
+
+There's no specific guidance on the formatting of error messages, see [anyhow/#209](https://github.com/dtolnay/anyhow/issues/209).
+Do not end error and context messages with `.` though.
+
## Early Returns
Do use early returns
diff --git a/src/tools/rust-analyzer/docs/user/manual.adoc b/src/tools/rust-analyzer/docs/user/manual.adoc
index b5c095fd9..5dafd1a4c 100644
--- a/src/tools/rust-analyzer/docs/user/manual.adoc
+++ b/src/tools/rust-analyzer/docs/user/manual.adoc
@@ -64,22 +64,8 @@ You can install the latest release of the plugin from
https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer[the marketplace].
Note that the plugin may cause conflicts with the
-https://marketplace.visualstudio.com/items?itemName=rust-lang.rust[official Rust plugin].
-It is recommended to disable the Rust plugin when using the rust-analyzer extension.
-
-By default, the plugin will prompt you to download the matching version of the server as well:
-
-image::https://user-images.githubusercontent.com/9021944/75067008-17502500-54ba-11ea-835a-f92aac50e866.png[]
-
-[NOTE]
-====
-To disable this notification put the following to `settings.json`
-
-[source,json]
-----
-{ "rust-analyzer.updates.askBeforeDownload": false }
-----
-====
+https://marketplace.visualstudio.com/items?itemName=rust-lang.rust[previous official Rust plugin].
+The latter is no longer maintained and should be uninstalled.
The server binary is stored in the extension install directory, which starts with `rust-lang.rust-analyzer-` and is located under:
@@ -141,6 +127,9 @@ If you're not using Code, you can compile and install only the LSP server:
$ cargo xtask install --server
----
+Make sure that `.cargo/bin` is in `$PATH` and precedes paths where `rust-analyzer` may also be installed.
+Specifically, `rustup` includes a proxy called `rust-analyzer`, which can cause problems if you're planning to use a source build or even a downloaded binary.
+
=== rust-analyzer Language Server Binary
Other editors generally require the `rust-analyzer` binary to be in `$PATH`.
@@ -280,12 +269,12 @@ Also see the https://emacs-lsp.github.io/lsp-mode/page/lsp-rust-analyzer/[rust-a
Note the excellent https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/[guide] from https://github.com/rksm[@rksm] on how to set-up Emacs for Rust development with LSP mode and several other packages.
-=== Vim/NeoVim
+=== Vim/Neovim
Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
Not needed if the extension can install/update it on its own, coc-rust-analyzer is one example.
-There are several LSP client implementations for vim or neovim:
+There are several LSP client implementations for Vim or Neovim:
==== coc-rust-analyzer
@@ -308,7 +297,7 @@ Note: for code actions, use `coc-codeaction-cursor` and `coc-codeaction-selected
https://github.com/autozimu/LanguageClient-neovim[here]
* The GitHub project wiki has extra tips on configuration
-2. Configure by adding this to your vim/neovim config file (replacing the existing Rust-specific line if it exists):
+2. Configure by adding this to your Vim/Neovim config file (replacing the existing Rust-specific line if it exists):
+
[source,vim]
----
@@ -335,7 +324,7 @@ let g:ale_linters = {'rust': ['analyzer']}
==== nvim-lsp
-NeoVim 0.5 has built-in language server support.
+Neovim 0.5 has built-in language server support.
For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lspconfig#rust_analyzer[neovim/nvim-lspconfig].
Once `neovim/nvim-lspconfig` is installed, use `+lua require'lspconfig'.rust_analyzer.setup({})+` in your `init.vim`.
@@ -376,7 +365,7 @@ EOF
See https://sharksforarms.dev/posts/neovim-rust/ for more tips on getting started.
-Check out https://github.com/simrat39/rust-tools.nvim for a batteries included rust-analyzer setup for neovim.
+Check out https://github.com/simrat39/rust-tools.nvim for a batteries included rust-analyzer setup for Neovim.
==== vim-lsp
@@ -460,27 +449,24 @@ You'll need to close and reopen all .rs and Cargo files, or to restart the IDE,
Support for the language server protocol is built into Kate through the LSP plugin, which is included by default.
It is preconfigured to use rust-analyzer for Rust sources since Kate 21.12.
-Earlier versions allow you to use rust-analyzer through a simple settings change.
-In the LSP Client settings of Kate, copy the content of the third tab "default parameters" to the second tab "server configuration".
-Then in the configuration replace:
-[source,json]
-----
- "rust": {
- "command": ["rls"],
- "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
- "url": "https://github.com/rust-lang/rls",
- "highlightingModeRegex": "^Rust$"
- },
-----
-With
+To change rust-analyzer config options, start from the following example and put it into Kate's "User Server Settings" tab (located under the LSP Client settings):
[source,json]
----
+{
+ "servers": {
"rust": {
- "command": ["rust-analyzer"],
- "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
- "url": "https://github.com/rust-lang/rust-analyzer",
- "highlightingModeRegex": "^Rust$"
- },
+ "initializationOptions": {
+ "cachePriming": {
+ "enable": false
+ },
+ "check": {
+ "allTargets": false
+ },
+ "checkOnSave": false
+ }
+ }
+ }
+}
----
Then click on apply, and restart the LSP server for your rust project.
@@ -933,17 +919,17 @@ For example:
More about `when` clause contexts https://code.visualstudio.com/docs/getstarted/keybindings#_when-clause-contexts[here].
==== Setting runnable environment variables
-You can use "rust-analyzer.runnableEnv" setting to define runnable environment-specific substitution variables.
+You can use "rust-analyzer.runnables.extraEnv" setting to define runnable environment-specific substitution variables.
The simplest way for all runnables in a bunch:
```jsonc
-"rust-analyzer.runnableEnv": {
+"rust-analyzer.runnables.extraEnv": {
"RUN_SLOW_TESTS": "1"
}
```
Or it is possible to specify vars more granularly:
```jsonc
-"rust-analyzer.runnableEnv": [
+"rust-analyzer.runnables.extraEnv": [
{
// "mask": null, // null mask means that this rule will be applied for all runnables
env: {
@@ -963,6 +949,29 @@ Or it is possible to specify vars more granularly:
You can use any valid regular expression as a mask.
Also note that a full runnable name is something like *run bin_or_example_name*, *test some::mod::test_name* or *test-mod some::mod*, so it is possible to distinguish binaries, single tests, and test modules with these masks: `"^run"`, `"^test "` (the trailing space matters!), and `"^test-mod"` respectively.
+If needed, you can set different values for different platforms:
+```jsonc
+"rust-analyzer.runnables.extraEnv": [
+ {
+ "platform": "win32", // windows only
+ env: {
+ "APP_DATA": "windows specific data"
+ }
+ },
+ {
+ "platform": ["linux"],
+ "env": {
+ "APP_DATA": "linux data",
+ }
+ },
+ { // for all platforms
+ "env": {
+ "APP_COMMON_DATA": "xxx",
+ }
+ }
+]
+```
+
==== Compiler feedback from external commands
Instead of relying on the built-in `cargo check`, you can configure Code to run a command in the background and use the `$rustc-watch` problem matcher to generate inline error markers from its output.
diff --git a/src/tools/rust-analyzer/lib/README.md b/src/tools/rust-analyzer/lib/README.md
index ed55e31d6..d420eeb96 100644
--- a/src/tools/rust-analyzer/lib/README.md
+++ b/src/tools/rust-analyzer/lib/README.md
@@ -3,3 +3,12 @@
Crates in this directory are published to [crates.io](https://crates.io) and obey semver.
They _could_ live in a separate repo, but we want to experiment with a monorepo setup.
+
+We use these crates from crates.io, not the local copies, because we want to ensure that
+rust-analyzer works with the versions that are published. This means if you add a new API to these
+crates, you need to release a new version to crates.io before you can use that API in rust-analyzer.
+
+To release new versions of these packages, change their version in Cargo.toml. Once your PR is merged into master, a workflow will automatically publish the new version to crates.io.
+
+While prototyping, the local versions can be used by uncommenting the relevant lines in the
+`[patch.'crates-io']` section in Cargo.toml.
diff --git a/src/tools/rust-analyzer/lib/la-arena/Cargo.toml b/src/tools/rust-analyzer/lib/la-arena/Cargo.toml
index ec5ba8ba0..01f2b87b3 100644
--- a/src/tools/rust-analyzer/lib/la-arena/Cargo.toml
+++ b/src/tools/rust-analyzer/lib/la-arena/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "la-arena"
-version = "0.3.0"
+version = "0.3.1"
description = "Simple index-based arena without deletion."
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/la-arena"
diff --git a/src/tools/rust-analyzer/lib/line-index/Cargo.toml b/src/tools/rust-analyzer/lib/line-index/Cargo.toml
index 019ad3a53..6c0d06f47 100644
--- a/src/tools/rust-analyzer/lib/line-index/Cargo.toml
+++ b/src/tools/rust-analyzer/lib/line-index/Cargo.toml
@@ -1,11 +1,11 @@
[package]
name = "line-index"
-version = "0.1.0-pre.1"
+version = "0.1.0"
description = "Maps flat `TextSize` offsets to/from `(line, column)` representation."
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/line-index"
edition = "2021"
[dependencies]
-text-size.workspace = true
-nohash-hasher.workspace = true
+text-size = "1.1.0"
+nohash-hasher = "0.2.0"
diff --git a/src/tools/rust-analyzer/lib/line-index/src/lib.rs b/src/tools/rust-analyzer/lib/line-index/src/lib.rs
index ad67d3f24..03371c9c8 100644
--- a/src/tools/rust-analyzer/lib/line-index/src/lib.rs
+++ b/src/tools/rust-analyzer/lib/line-index/src/lib.rs
@@ -94,44 +94,7 @@ pub struct LineIndex {
impl LineIndex {
/// Returns a `LineIndex` for the `text`.
pub fn new(text: &str) -> LineIndex {
- let mut newlines = Vec::<TextSize>::with_capacity(16);
- let mut line_wide_chars = IntMap::<u32, Box<[WideChar]>>::default();
-
- let mut wide_chars = Vec::<WideChar>::new();
- let mut cur_row = TextSize::from(0);
- let mut cur_col = TextSize::from(0);
- let mut line = 0u32;
-
- for c in text.chars() {
- let c_len = TextSize::of(c);
- cur_row += c_len;
- if c == '\n' {
- newlines.push(cur_row);
-
- // Save any wide characters seen in the previous line
- if !wide_chars.is_empty() {
- let cs = std::mem::take(&mut wide_chars).into_boxed_slice();
- line_wide_chars.insert(line, cs);
- }
-
- // Prepare for processing the next line
- cur_col = TextSize::from(0);
- line += 1;
- continue;
- }
-
- if !c.is_ascii() {
- wide_chars.push(WideChar { start: cur_col, end: cur_col + c_len });
- }
-
- cur_col += c_len;
- }
-
- // Save any wide characters seen in the last line
- if !wide_chars.is_empty() {
- line_wide_chars.insert(line, wide_chars.into_boxed_slice());
- }
-
+ let (newlines, line_wide_chars) = analyze_source_file(text);
LineIndex {
newlines: newlines.into_boxed_slice(),
line_wide_chars,
@@ -235,3 +198,182 @@ impl LineIndex {
self.len
}
}
+
+/// This is adapted from the rustc_span crate, https://github.com/rust-lang/rust/blob/de59844c98f7925242a798a72c59dc3610dd0e2c/compiler/rustc_span/src/analyze_source_file.rs
+fn analyze_source_file(src: &str) -> (Vec<TextSize>, IntMap<u32, Box<[WideChar]>>) {
+ assert!(src.len() < !0u32 as usize);
+ let mut lines = vec![];
+ let mut line_wide_chars = IntMap::<u32, Vec<WideChar>>::default();
+
+ // Calls the right implementation, depending on hardware support available.
+ analyze_source_file_dispatch(src, &mut lines, &mut line_wide_chars);
+
+ (lines, line_wide_chars.into_iter().map(|(k, v)| (k, v.into_boxed_slice())).collect())
+}
+
+#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+fn analyze_source_file_dispatch(
+ src: &str,
+ lines: &mut Vec<TextSize>,
+ multi_byte_chars: &mut IntMap<u32, Vec<WideChar>>,
+) {
+ if is_x86_feature_detected!("sse2") {
+ // SAFETY: SSE2 support was checked
+ unsafe {
+ analyze_source_file_sse2(src, lines, multi_byte_chars);
+ }
+ } else {
+ analyze_source_file_generic(src, src.len(), TextSize::from(0), lines, multi_byte_chars);
+ }
+}
+
+/// Checks 16 byte chunks of text at a time. If the chunk contains
+/// something other than printable ASCII characters and newlines, the
+/// function falls back to the generic implementation. Otherwise it uses
+/// SSE2 intrinsics to quickly find all newlines.
+#[target_feature(enable = "sse2")]
+#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+unsafe fn analyze_source_file_sse2(
+ src: &str,
+ lines: &mut Vec<TextSize>,
+ multi_byte_chars: &mut IntMap<u32, Vec<WideChar>>,
+) {
+ #[cfg(target_arch = "x86")]
+ use std::arch::x86::*;
+ #[cfg(target_arch = "x86_64")]
+ use std::arch::x86_64::*;
+
+ const CHUNK_SIZE: usize = 16;
+
+ let src_bytes = src.as_bytes();
+
+ let chunk_count = src.len() / CHUNK_SIZE;
+
+ // This variable keeps track of where we should start decoding a
+ // chunk. If a multi-byte character spans across chunk boundaries,
+ // we need to skip that part in the next chunk because we already
+ // handled it.
+ let mut intra_chunk_offset = 0;
+
+ for chunk_index in 0..chunk_count {
+ let ptr = src_bytes.as_ptr() as *const __m128i;
+ // We don't know if the pointer is aligned to 16 bytes, so we
+ // use `loadu`, which supports unaligned loading.
+ let chunk = _mm_loadu_si128(ptr.add(chunk_index));
+
+ // For each character in the chunk, see if its byte value is < 0, which
+ // indicates that it's part of a UTF-8 char.
+ let multibyte_test = _mm_cmplt_epi8(chunk, _mm_set1_epi8(0));
+ // Create a bit mask from the comparison results.
+ let multibyte_mask = _mm_movemask_epi8(multibyte_test);
+
+ // If the bit mask is all zero, we only have ASCII chars here:
+ if multibyte_mask == 0 {
+ assert!(intra_chunk_offset == 0);
+
+ // Check for newlines in the chunk
+ let newlines_test = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8));
+ let newlines_mask = _mm_movemask_epi8(newlines_test);
+
+ if newlines_mask != 0 {
+ // All control characters are newlines, record them
+ let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32;
+ let output_offset = TextSize::from((chunk_index * CHUNK_SIZE + 1) as u32);
+
+ loop {
+ let index = newlines_mask.trailing_zeros();
+
+ if index >= CHUNK_SIZE as u32 {
+ // We have arrived at the end of the chunk.
+ break;
+ }
+
+ lines.push(TextSize::from(index) + output_offset);
+
+ // Clear the bit, so we can find the next one.
+ newlines_mask &= (!1) << index;
+ }
+ }
+ continue;
+ }
+
+ // The slow path.
+ // There are control chars in here, fallback to generic decoding.
+ let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset;
+ intra_chunk_offset = analyze_source_file_generic(
+ &src[scan_start..],
+ CHUNK_SIZE - intra_chunk_offset,
+ TextSize::from(scan_start as u32),
+ lines,
+ multi_byte_chars,
+ );
+ }
+
+ // There might still be a tail left to analyze
+ let tail_start = chunk_count * CHUNK_SIZE + intra_chunk_offset;
+ if tail_start < src.len() {
+ analyze_source_file_generic(
+ &src[tail_start..],
+ src.len() - tail_start,
+ TextSize::from(tail_start as u32),
+ lines,
+ multi_byte_chars,
+ );
+ }
+}
+
+#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+// The target (or compiler version) does not support SSE2 ...
+fn analyze_source_file_dispatch(
+ src: &str,
+ lines: &mut Vec<TextSize>,
+ multi_byte_chars: &mut IntMap<u32, Vec<WideChar>>,
+) {
+ analyze_source_file_generic(src, src.len(), TextSize::from(0), lines, multi_byte_chars);
+}
+
+// `scan_len` determines the number of bytes in `src` to scan. Note that the
+// function can read past `scan_len` if a multi-byte character starts within the
+// range but extends past it. The overflow is returned by the function.
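+//
+// For example (illustrative): scanning `"aé"` with `scan_len == 2` consumes the
+// two-byte `é` in full and returns an overflow of 1.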
+fn analyze_source_file_generic(
+ src: &str,
+ scan_len: usize,
+ output_offset: TextSize,
+ lines: &mut Vec<TextSize>,
+ multi_byte_chars: &mut IntMap<u32, Vec<WideChar>>,
+) -> usize {
+ assert!(src.len() >= scan_len);
+ let mut i = 0;
+ let src_bytes = src.as_bytes();
+
+ while i < scan_len {
+ let byte = unsafe {
+ // We verified that i < scan_len <= src.len()
+ *src_bytes.get_unchecked(i)
+ };
+
+ // How much to advance in order to get to the next UTF-8 char in the
+ // string.
+ let mut char_len = 1;
+
+ if byte == b'\n' {
+ lines.push(TextSize::from(i as u32 + 1) + output_offset);
+ } else if byte >= 127 {
+ // The slow path: Just decode to `char`.
+ let c = src[i..].chars().next().unwrap();
+ char_len = c.len_utf8();
+
+ let pos = TextSize::from(i as u32) + output_offset;
+
+ if char_len > 1 {
+ assert!((2..=4).contains(&char_len));
+ let mbc = WideChar { start: pos, end: pos + TextSize::from(char_len as u32) };
+ multi_byte_chars.entry(lines.len() as u32).or_default().push(mbc);
+ }
+ }
+
+ i += char_len;
+ }
+
+ i - scan_len
+}
diff --git a/src/tools/rust-analyzer/lib/line-index/src/tests.rs b/src/tools/rust-analyzer/lib/line-index/src/tests.rs
index 31c01c20e..8f3762d19 100644
--- a/src/tools/rust-analyzer/lib/line-index/src/tests.rs
+++ b/src/tools/rust-analyzer/lib/line-index/src/tests.rs
@@ -1,11 +1,120 @@
-use super::LineIndex;
-
-#[test]
-fn test_empty_index() {
- let col_index = LineIndex::new(
- "
-const C: char = 'x';
-",
- );
- assert_eq!(col_index.line_wide_chars.len(), 0);
+use crate::{LineIndex, TextSize, WideChar};
+
+macro_rules! test {
+ (
+ case: $test_name:ident,
+ text: $text:expr,
+ lines: $lines:expr,
+ multi_byte_chars: $multi_byte_chars:expr,
+ ) => {
+ #[test]
+ fn $test_name() {
+ let line_index = LineIndex::new($text);
+
+ let expected_lines: Vec<TextSize> =
+ $lines.into_iter().map(<TextSize as From<u32>>::from).collect();
+
+ assert_eq!(&*line_index.newlines, &*expected_lines);
+
+ let expected_mbcs: Vec<_> = $multi_byte_chars
+ .into_iter()
+ .map(|(line, (pos, end)): (u32, (u32, u32))| {
+ (line, WideChar { start: TextSize::from(pos), end: TextSize::from(end) })
+ })
+ .collect();
+
+ assert_eq!(
+ line_index
+ .line_wide_chars
+ .iter()
+ .flat_map(|(line, val)| std::iter::repeat(*line).zip(val.iter().copied()))
+ .collect::<Vec<_>>(),
+ expected_mbcs
+ );
+ }
+ };
}
+
+test!(
+ case: empty_text,
+ text: "",
+ lines: vec![],
+ multi_byte_chars: vec![],
+);
+
+test!(
+ case: newlines_short,
+ text: "a\nc",
+ lines: vec![2],
+ multi_byte_chars: vec![],
+);
+
+test!(
+ case: newlines_long,
+ text: "012345678\nabcdef012345678\na",
+ lines: vec![10, 26],
+ multi_byte_chars: vec![],
+);
+
+test!(
+ case: newline_and_multi_byte_char_in_same_chunk,
+ text: "01234β789\nbcdef0123456789abcdef",
+ lines: vec![11],
+ multi_byte_chars: vec![(0, (5, 7))],
+);
+
+test!(
+ case: newline_and_control_char_in_same_chunk,
+ text: "01234\u{07}6789\nbcdef0123456789abcdef",
+ lines: vec![11],
+ multi_byte_chars: vec![],
+);
+
+test!(
+ case: multi_byte_char_short,
+ text: "aβc",
+ lines: vec![],
+ multi_byte_chars: vec![(0, (1, 3))],
+);
+
+test!(
+ case: multi_byte_char_long,
+ text: "0123456789abcΔf012345β",
+ lines: vec![],
+ multi_byte_chars: vec![(0, (13, 15)), (0, (22, 24))],
+);
+
+test!(
+ case: multi_byte_char_across_chunk_boundary,
+ text: "0123456789abcdeΔ123456789abcdef01234",
+ lines: vec![],
+ multi_byte_chars: vec![(0, (15, 17))],
+);
+
+test!(
+ case: multi_byte_char_across_chunk_boundary_tail,
+ text: "0123456789abcdeΔ....",
+ lines: vec![],
+ multi_byte_chars: vec![(0, (15, 17))],
+);
+
+test!(
+ case: multi_byte_with_new_lines,
+ text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf",
+ lines: vec![7, 27],
+ multi_byte_chars: vec![(1, (13, 15)), (2, (29, 31))],
+);
+
+test!(
+ case: trailing_newline,
+ text: "0123456789\n",
+ lines: vec![11],
+ multi_byte_chars: vec![],
+);
+
+test!(
+ case: trailing_newline_chunk_boundary,
+ text: "0123456789abcde\n",
+ lines: vec![16],
+ multi_byte_chars: vec![],
+);
diff --git a/src/tools/rust-analyzer/lib/line-index/tests/it.rs b/src/tools/rust-analyzer/lib/line-index/tests/it.rs
deleted file mode 100644
index ce1c0bc6f..000000000
--- a/src/tools/rust-analyzer/lib/line-index/tests/it.rs
+++ /dev/null
@@ -1,62 +0,0 @@
-use line_index::{LineCol, LineIndex, TextRange};
-
-#[test]
-fn test_line_index() {
- let text = "hello\nworld";
- let table = [
- (00, 0, 0),
- (01, 0, 1),
- (05, 0, 5),
- (06, 1, 0),
- (07, 1, 1),
- (08, 1, 2),
- (10, 1, 4),
- (11, 1, 5),
- ];
-
- let index = LineIndex::new(text);
- for (offset, line, col) in table {
- assert_eq!(index.line_col(offset.into()), LineCol { line, col });
- }
-
- let text = "\nhello\nworld";
- let table = [(0, 0, 0), (1, 1, 0), (2, 1, 1), (6, 1, 5), (7, 2, 0)];
- let index = LineIndex::new(text);
- for (offset, line, col) in table {
- assert_eq!(index.line_col(offset.into()), LineCol { line, col });
- }
-}
-
-#[test]
-fn test_char_len() {
- assert_eq!('メ'.len_utf8(), 3);
- assert_eq!('メ'.len_utf16(), 1);
-}
-
-#[test]
-fn test_splitlines() {
- fn r(lo: u32, hi: u32) -> TextRange {
- TextRange::new(lo.into(), hi.into())
- }
-
- let text = "a\nbb\nccc\n";
- let line_index = LineIndex::new(text);
-
- let actual = line_index.lines(r(0, 9)).collect::<Vec<_>>();
- let expected = vec![r(0, 2), r(2, 5), r(5, 9)];
- assert_eq!(actual, expected);
-
- let text = "";
- let line_index = LineIndex::new(text);
-
- let actual = line_index.lines(r(0, 0)).collect::<Vec<_>>();
- let expected = vec![];
- assert_eq!(actual, expected);
-
- let text = "\n";
- let line_index = LineIndex::new(text);
-
- let actual = line_index.lines(r(0, 1)).collect::<Vec<_>>();
- let expected = vec![r(0, 1)];
- assert_eq!(actual, expected)
-}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
index e78a9d2eb..01707d301 100644
--- a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
+++ b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "lsp-server"
-version = "0.7.0"
+version = "0.7.2"
description = "Generic LSP server scaffold."
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server"
@@ -8,8 +8,8 @@ edition = "2021"
[dependencies]
log = "0.4.17"
-serde_json.workspace = true
-serde.workspace = true
+serde_json = "1.0.96"
+serde = { version = "1.0.156", features = ["derive"] }
crossbeam-channel = "0.5.6"
[dev-dependencies]
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs
index b241561f9..730ad51f4 100644
--- a/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs
@@ -265,7 +265,7 @@ fn read_msg_text(inp: &mut dyn BufRead) -> io::Result<Option<String>> {
let header_name = parts.next().unwrap();
let header_value =
parts.next().ok_or_else(|| invalid_data!("malformed header: {:?}", buf))?;
- if header_name == "Content-Length" {
+ if header_name.eq_ignore_ascii_case("Content-Length") {
size = Some(header_value.parse::<usize>().map_err(invalid_data)?);
}
}
diff --git a/src/tools/rust-analyzer/triagebot.toml b/src/tools/rust-analyzer/triagebot.toml
index a910e012b..f0cd35399 100644
--- a/src/tools/rust-analyzer/triagebot.toml
+++ b/src/tools/rust-analyzer/triagebot.toml
@@ -9,3 +9,7 @@ allow-unauthenticated = [
[autolabel."S-waiting-on-review"]
new_pr = true
+
+[no-merges]
+exclude_labels = ["sync"]
+labels = ["has-merge-commits", "S-waiting-on-author"]
diff --git a/src/tools/rust-analyzer/xtask/Cargo.toml b/src/tools/rust-analyzer/xtask/Cargo.toml
index b4b294c30..7a34617e2 100644
--- a/src/tools/rust-analyzer/xtask/Cargo.toml
+++ b/src/tools/rust-analyzer/xtask/Cargo.toml
@@ -4,7 +4,7 @@ version = "0.1.0"
publish = false
license = "MIT OR Apache-2.0"
edition = "2021"
-rust-version = "1.65"
+rust-version.workspace = true
[dependencies]
anyhow = "1.0.62"
diff --git a/src/tools/rust-analyzer/xtask/src/flags.rs b/src/tools/rust-analyzer/xtask/src/flags.rs
index 210047970..7720ad69f 100644
--- a/src/tools/rust-analyzer/xtask/src/flags.rs
+++ b/src/tools/rust-analyzer/xtask/src/flags.rs
@@ -1,5 +1,7 @@
#![allow(unreachable_pub)]
+use std::str::FromStr;
+
use crate::install::{ClientOpt, Malloc, ServerOpt};
xflags::xflags! {
@@ -42,7 +44,7 @@ xflags::xflags! {
required changelog: String
}
cmd metrics {
- optional --dry-run
+ optional measurement_type: MeasurementType
}
/// Builds a benchmark version of rust-analyzer and puts it into `./target`.
cmd bb {
@@ -106,8 +108,31 @@ pub struct PublishReleaseNotes {
}
#[derive(Debug)]
+pub enum MeasurementType {
+ Build,
+ AnalyzeSelf,
+ AnalyzeRipgrep,
+ AnalyzeWebRender,
+ AnalyzeDiesel,
+}
+
+impl FromStr for MeasurementType {
+ type Err = String;
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s {
+ "build" => Ok(Self::Build),
+ "self" => Ok(Self::AnalyzeSelf),
+ "ripgrep" => Ok(Self::AnalyzeRipgrep),
+ "webrender" => Ok(Self::AnalyzeWebRender),
+ "diesel" => Ok(Self::AnalyzeDiesel),
+ _ => Err("Invalid option".to_string()),
+ }
+ }
+}
+
+#[derive(Debug)]
pub struct Metrics {
- pub dry_run: bool,
+ pub measurement_type: Option<MeasurementType>,
}
#[derive(Debug)]
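
For reviewers, a self-contained sketch of how the new optional `measurement_type` argument parses; the enum and match arms mirror the `FromStr` impl above, while the `main` is illustrative only.

```rust
use std::str::FromStr;

// Illustrative copy of the FromStr impl added to xtask/src/flags.rs.
#[derive(Debug, PartialEq)]
enum MeasurementType {
    Build,
    AnalyzeSelf,
    AnalyzeRipgrep,
    AnalyzeWebRender,
    AnalyzeDiesel,
}

impl FromStr for MeasurementType {
    type Err = String;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "build" => Ok(Self::Build),
            "self" => Ok(Self::AnalyzeSelf),
            "ripgrep" => Ok(Self::AnalyzeRipgrep),
            "webrender" => Ok(Self::AnalyzeWebRender),
            "diesel" => Ok(Self::AnalyzeDiesel),
            _ => Err("Invalid option".to_string()),
        }
    }
}

fn main() {
    // e.g. `cargo xtask metrics build` hands "build" to this parser.
    assert_eq!("build".parse::<MeasurementType>(), Ok(MeasurementType::Build));
    assert!("unknown".parse::<MeasurementType>().is_err());
}
```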
diff --git a/src/tools/rust-analyzer/xtask/src/install.rs b/src/tools/rust-analyzer/xtask/src/install.rs
index 83223a551..e8c00c72e 100644
--- a/src/tools/rust-analyzer/xtask/src/install.rs
+++ b/src/tools/rust-analyzer/xtask/src/install.rs
@@ -2,13 +2,13 @@
use std::{env, path::PathBuf, str};
-use anyhow::{bail, format_err, Context, Result};
+use anyhow::{bail, format_err, Context};
use xshell::{cmd, Shell};
use crate::flags;
impl flags::Install {
- pub(crate) fn run(self, sh: &Shell) -> Result<()> {
+ pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
if cfg!(target_os = "macos") {
fix_path_for_mac(sh).context("Fix path for mac")?;
}
@@ -39,7 +39,7 @@ pub(crate) enum Malloc {
Jemalloc,
}
-fn fix_path_for_mac(sh: &Shell) -> Result<()> {
+fn fix_path_for_mac(sh: &Shell) -> anyhow::Result<()> {
let mut vscode_path: Vec<PathBuf> = {
const COMMON_APP_PATH: &str =
r"/Applications/Visual Studio Code.app/Contents/Resources/app/bin";
@@ -68,7 +68,7 @@ fn fix_path_for_mac(sh: &Shell) -> Result<()> {
Ok(())
}
-fn install_client(sh: &Shell, client_opt: ClientOpt) -> Result<()> {
+fn install_client(sh: &Shell, client_opt: ClientOpt) -> anyhow::Result<()> {
let _dir = sh.push_dir("./editors/code");
// Package extension.
@@ -129,7 +129,7 @@ fn install_client(sh: &Shell, client_opt: ClientOpt) -> Result<()> {
Ok(())
}
-fn install_server(sh: &Shell, opts: ServerOpt) -> Result<()> {
+fn install_server(sh: &Shell, opts: ServerOpt) -> anyhow::Result<()> {
let features = match opts.malloc {
Malloc::System => &[][..],
Malloc::Mimalloc => &["--features", "mimalloc"],
diff --git a/src/tools/rust-analyzer/xtask/src/metrics.rs b/src/tools/rust-analyzer/xtask/src/metrics.rs
index b6f730dbf..685374231 100644
--- a/src/tools/rust-analyzer/xtask/src/metrics.rs
+++ b/src/tools/rust-analyzer/xtask/src/metrics.rs
@@ -1,6 +1,6 @@
use std::{
collections::BTreeMap,
- env, fs,
+ fs,
io::Write as _,
path::Path,
time::{Instant, SystemTime, UNIX_EPOCH},
@@ -9,16 +9,13 @@ use std::{
use anyhow::{bail, format_err};
use xshell::{cmd, Shell};
-use crate::flags;
+use crate::flags::{self, MeasurementType};
type Unit = String;
impl flags::Metrics {
pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
let mut metrics = Metrics::new(sh)?;
- if !self.dry_run {
- sh.remove_path("./target/release")?;
- }
if !Path::new("./target/rustc-perf").exists() {
sh.create_dir("./target/rustc-perf")?;
cmd!(sh, "git clone https://github.com/rust-lang/rustc-perf.git ./target/rustc-perf")
@@ -32,38 +29,47 @@ impl flags::Metrics {
let _env = sh.push_env("RA_METRICS", "1");
- {
- // https://github.com/rust-lang/rust-analyzer/issues/9997
- let _d = sh.push_dir("target/rustc-perf/collector/benchmarks/webrender");
- cmd!(sh, "cargo update -p url --precise 1.6.1").run()?;
- }
- metrics.measure_build(sh)?;
- metrics.measure_analysis_stats_self(sh)?;
- metrics.measure_analysis_stats(sh, "ripgrep")?;
- metrics.measure_analysis_stats(sh, "webrender")?;
- metrics.measure_analysis_stats(sh, "diesel/diesel")?;
-
- if !self.dry_run {
- let _d = sh.push_dir("target");
- let metrics_token = env::var("METRICS_TOKEN").unwrap();
- cmd!(
- sh,
- "git clone --depth 1 https://{metrics_token}@github.com/rust-analyzer/metrics.git"
- )
- .run()?;
-
- {
- let mut file =
- fs::File::options().append(true).open("target/metrics/metrics.json")?;
- writeln!(file, "{}", metrics.json())?;
+ let filename = match self.measurement_type {
+ Some(ms) => match ms {
+ MeasurementType::Build => {
+ metrics.measure_build(sh)?;
+ "build.json"
+ }
+ MeasurementType::AnalyzeSelf => {
+ metrics.measure_analysis_stats_self(sh)?;
+ "self.json"
+ }
+ MeasurementType::AnalyzeRipgrep => {
+ metrics.measure_analysis_stats(sh, "ripgrep")?;
+ "ripgrep.json"
+ }
+ MeasurementType::AnalyzeWebRender => {
+ {
+ // https://github.com/rust-lang/rust-analyzer/issues/9997
+ let _d = sh.push_dir("target/rustc-perf/collector/benchmarks/webrender");
+ cmd!(sh, "cargo update -p url --precise 1.6.1").run()?;
+ }
+ metrics.measure_analysis_stats(sh, "webrender")?;
+ "webrender.json"
+ }
+ MeasurementType::AnalyzeDiesel => {
+ metrics.measure_analysis_stats(sh, "diesel/diesel")?;
+ "diesel.json"
+ }
+ },
+ None => {
+ metrics.measure_build(sh)?;
+ metrics.measure_analysis_stats_self(sh)?;
+ metrics.measure_analysis_stats(sh, "ripgrep")?;
+ metrics.measure_analysis_stats(sh, "webrender")?;
+ metrics.measure_analysis_stats(sh, "diesel/diesel")?;
+ "all.json"
}
+ };
- let _d = sh.push_dir("metrics");
- cmd!(sh, "git add .").run()?;
- cmd!(sh, "git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈")
- .run()?;
- cmd!(sh, "git push origin master").run()?;
- }
+ let mut file =
+ fs::File::options().write(true).create(true).open(format!("target/{}", filename))?;
+ writeln!(file, "{}", metrics.json())?;
eprintln!("{metrics:#?}");
Ok(())
}
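
A minimal sketch of the new output handling, assuming each invocation runs one measurement and writes its JSON to `target/<name>.json` as in the hunk above; the function name and sample payload are illustrative.

```rust
use std::fs;
use std::io::Write as _;

// Illustrative only: open-or-create target/<filename> and append one JSON record,
// matching the write added to xtask/src/metrics.rs.
fn write_metrics_json(json: &str, filename: &str) -> std::io::Result<()> {
    let mut file =
        fs::File::options().write(true).create(true).open(format!("target/{filename}"))?;
    writeln!(file, "{json}")
}

fn main() -> std::io::Result<()> {
    fs::create_dir_all("target")?;
    write_metrics_json(r#"{"example": true}"#, "build.json")
}
```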
diff --git a/src/tools/rust-analyzer/xtask/src/publish.rs b/src/tools/rust-analyzer/xtask/src/publish.rs
index cdb7d8fac..7faae9b20 100644
--- a/src/tools/rust-analyzer/xtask/src/publish.rs
+++ b/src/tools/rust-analyzer/xtask/src/publish.rs
@@ -1,12 +1,12 @@
mod notes;
use crate::flags;
-use anyhow::{anyhow, bail, Result};
+use anyhow::bail;
use std::env;
use xshell::{cmd, Shell};
impl flags::PublishReleaseNotes {
- pub(crate) fn run(self, sh: &Shell) -> Result<()> {
+ pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
let asciidoc = sh.read_file(&self.changelog)?;
let mut markdown = notes::convert_asciidoc_to_markdown(std::io::Cursor::new(&asciidoc))?;
let file_name = check_file_name(self.changelog)?;
@@ -24,11 +24,11 @@ impl flags::PublishReleaseNotes {
}
}
-fn check_file_name<P: AsRef<std::path::Path>>(path: P) -> Result<String> {
+fn check_file_name<P: AsRef<std::path::Path>>(path: P) -> anyhow::Result<String> {
let file_name = path
.as_ref()
.file_name()
- .ok_or_else(|| anyhow!("file name is not specified as `changelog`"))?
+ .ok_or_else(|| anyhow::format_err!("file name is not specified as `changelog`"))?
.to_string_lossy();
let mut chars = file_name.chars();
@@ -61,7 +61,7 @@ fn create_original_changelog_url(file_name: &str) -> String {
format!("https://rust-analyzer.github.io/thisweek/{year}/{month}/{day}/{stem}.html")
}
-fn update_release(sh: &Shell, tag_name: &str, release_notes: &str) -> Result<()> {
+fn update_release(sh: &Shell, tag_name: &str, release_notes: &str) -> anyhow::Result<()> {
let token = match env::var("GITHUB_TOKEN") {
Ok(token) => token,
Err(_) => bail!("Please obtain a personal access token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable."),
@@ -79,6 +79,7 @@ fn update_release(sh: &Shell, tag_name: &str, release_notes: &str) -> Result<()>
let release_id = cmd!(sh, "jq .id").stdin(release_json).read()?;
let mut patch = String::new();
+ // note: the GitHub API doesn't update the target commit if the tag already exists
write_json::object(&mut patch)
.string("tag_name", tag_name)
.string("target_commitish", "master")
diff --git a/src/tools/rust-installer/install-template.sh b/src/tools/rust-installer/install-template.sh
index 6415644e0..b477c3eac 100644
--- a/src/tools/rust-installer/install-template.sh
+++ b/src/tools/rust-installer/install-template.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
# No undefined variables
set -u
diff --git a/src/tools/rustfmt/src/expr.rs b/src/tools/rustfmt/src/expr.rs
index 5b1b4fbd4..c3c07f310 100644
--- a/src/tools/rustfmt/src/expr.rs
+++ b/src/tools/rustfmt/src/expr.rs
@@ -256,7 +256,7 @@ pub(crate) fn format_expr(
shape,
SeparatorPlace::Back,
),
- ast::ExprKind::Index(ref expr, ref index) => {
+ ast::ExprKind::Index(ref expr, ref index, _) => {
rewrite_index(&**expr, &**index, context, shape)
}
ast::ExprKind::Repeat(ref expr, ref repeats) => rewrite_pair(
@@ -1342,7 +1342,7 @@ pub(crate) fn is_simple_expr(expr: &ast::Expr) -> bool {
| ast::ExprKind::Field(ref expr, _)
| ast::ExprKind::Try(ref expr)
| ast::ExprKind::Unary(_, ref expr) => is_simple_expr(expr),
- ast::ExprKind::Index(ref lhs, ref rhs) => is_simple_expr(lhs) && is_simple_expr(rhs),
+ ast::ExprKind::Index(ref lhs, ref rhs, _) => is_simple_expr(lhs) && is_simple_expr(rhs),
ast::ExprKind::Repeat(ref lhs, ref rhs) => {
is_simple_expr(lhs) && is_simple_expr(&*rhs.value)
}
@@ -1382,12 +1382,8 @@ pub(crate) fn can_be_overflowed_expr(
|| (context.use_block_indent() && args_len == 1)
}
ast::ExprKind::MacCall(ref mac) => {
- match (
- rustc_ast::ast::MacDelimiter::from_token(mac.args.delim.to_token()),
- context.config.overflow_delimited_expr(),
- ) {
- (Some(ast::MacDelimiter::Bracket), true)
- | (Some(ast::MacDelimiter::Brace), true) => true,
+ match (mac.args.delim, context.config.overflow_delimited_expr()) {
+ (Delimiter::Bracket, true) | (Delimiter::Brace, true) => true,
_ => context.use_block_indent() && args_len == 1,
}
}
diff --git a/src/tools/rustfmt/src/macros.rs b/src/tools/rustfmt/src/macros.rs
index e9a298a27..4f45d0c74 100644
--- a/src/tools/rustfmt/src/macros.rs
+++ b/src/tools/rustfmt/src/macros.rs
@@ -13,7 +13,7 @@ use std::collections::HashMap;
use std::panic::{catch_unwind, AssertUnwindSafe};
use rustc_ast::token::{BinOpToken, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
+use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree};
use rustc_ast::{ast, ptr};
use rustc_ast_pretty::pprust;
use rustc_span::{
@@ -394,7 +394,7 @@ pub(crate) fn rewrite_macro_def(
}
let ts = def.body.tokens.clone();
- let mut parser = MacroParser::new(ts.into_trees());
+ let mut parser = MacroParser::new(ts.trees());
let parsed_def = match parser.parse() {
Some(def) => def,
None => return snippet,
@@ -736,9 +736,9 @@ impl MacroArgParser {
self.buf.clear();
}
- fn add_meta_variable(&mut self, iter: &mut TokenTreeCursor) -> Option<()> {
+ fn add_meta_variable(&mut self, iter: &mut RefTokenTreeCursor<'_>) -> Option<()> {
match iter.next() {
- Some(TokenTree::Token(
+ Some(&TokenTree::Token(
Token {
kind: TokenKind::Ident(name, _),
..
@@ -768,7 +768,7 @@ impl MacroArgParser {
&mut self,
inner: Vec<ParsedMacroArg>,
delim: Delimiter,
- iter: &mut TokenTreeCursor,
+ iter: &mut RefTokenTreeCursor<'_>,
) -> Option<()> {
let mut buffer = String::new();
let mut first = true;
@@ -868,11 +868,11 @@ impl MacroArgParser {
/// Returns a collection of parsed macro def's arguments.
fn parse(mut self, tokens: TokenStream) -> Option<Vec<ParsedMacroArg>> {
- let mut iter = tokens.into_trees();
+ let mut iter = tokens.trees();
while let Some(tok) = iter.next() {
match tok {
- TokenTree::Token(
+ &TokenTree::Token(
Token {
kind: TokenKind::Dollar,
span,
@@ -901,7 +901,7 @@ impl MacroArgParser {
self.add_meta_variable(&mut iter)?;
}
TokenTree::Token(ref t, _) => self.update_buffer(t),
- TokenTree::Delimited(_delimited_span, delimited, ref tts) => {
+ &TokenTree::Delimited(_delimited_span, delimited, ref tts) => {
if !self.buf.is_empty() {
if next_space(&self.last_tok.kind) == SpaceState::Always {
self.add_separator();
@@ -1119,12 +1119,12 @@ pub(crate) fn macro_style(mac: &ast::MacCall, context: &RewriteContext<'_>) -> D
// A very simple parser that just parses a macros 2.0 definition into its branches.
// Currently we do not attempt to parse any further than that.
-struct MacroParser {
- toks: TokenTreeCursor,
+struct MacroParser<'a> {
+ toks: RefTokenTreeCursor<'a>,
}
-impl MacroParser {
- const fn new(toks: TokenTreeCursor) -> Self {
+impl<'a> MacroParser<'a> {
+ const fn new(toks: RefTokenTreeCursor<'a>) -> Self {
Self { toks }
}
@@ -1143,9 +1143,9 @@ impl MacroParser {
let tok = self.toks.next()?;
let (lo, args_paren_kind) = match tok {
TokenTree::Token(..) => return None,
- TokenTree::Delimited(delimited_span, d, _) => (delimited_span.open.lo(), d),
+ &TokenTree::Delimited(delimited_span, d, _) => (delimited_span.open.lo(), d),
};
- let args = TokenStream::new(vec![tok]);
+ let args = TokenStream::new(vec![tok.clone()]);
match self.toks.next()? {
TokenTree::Token(
Token {
diff --git a/src/tools/rustfmt/src/matches.rs b/src/tools/rustfmt/src/matches.rs
index aac5e59b8..4c37116f1 100644
--- a/src/tools/rustfmt/src/matches.rs
+++ b/src/tools/rustfmt/src/matches.rs
@@ -594,7 +594,7 @@ fn can_flatten_block_around_this(body: &ast::Expr) -> bool {
ast::ExprKind::AddrOf(_, _, ref expr)
| ast::ExprKind::Try(ref expr)
| ast::ExprKind::Unary(_, ref expr)
- | ast::ExprKind::Index(ref expr, _)
+ | ast::ExprKind::Index(ref expr, _, _)
| ast::ExprKind::Cast(ref expr, _) => can_flatten_block_around_this(expr),
_ => false,
}
diff --git a/src/tools/rustfmt/src/parse/macros/mod.rs b/src/tools/rustfmt/src/parse/macros/mod.rs
index 67f398592..7a802f7a8 100644
--- a/src/tools/rustfmt/src/parse/macros/mod.rs
+++ b/src/tools/rustfmt/src/parse/macros/mod.rs
@@ -56,7 +56,7 @@ fn parse_macro_arg<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
);
parse_macro_arg!(
Pat,
- |parser: &mut rustc_parse::parser::Parser<'b>| parser.parse_pat_no_top_alt(None),
+ |parser: &mut rustc_parse::parser::Parser<'b>| parser.parse_pat_no_top_alt(None, None),
|x: ptr::P<ast::Pat>| Some(x)
);
// `parse_item` returns `Option<ptr::P<ast::Item>>`.
diff --git a/src/tools/rustfmt/src/parse/session.rs b/src/tools/rustfmt/src/parse/session.rs
index 81b5015dd..945e3e42f 100644
--- a/src/tools/rustfmt/src/parse/session.rs
+++ b/src/tools/rustfmt/src/parse/session.rs
@@ -4,7 +4,7 @@ use std::sync::atomic::{AtomicBool, Ordering};
use rustc_data_structures::sync::{Lrc, Send};
use rustc_errors::emitter::{Emitter, EmitterWriter};
use rustc_errors::translation::Translate;
-use rustc_errors::{ColorConfig, Diagnostic, Handler, Level as DiagnosticLevel, TerminalUrl};
+use rustc_errors::{ColorConfig, Diagnostic, Handler, Level as DiagnosticLevel};
use rustc_session::parse::ParseSess as RawParseSess;
use rustc_span::{
source_map::{FilePathMapping, SourceMap},
@@ -139,30 +139,15 @@ fn default_handler(
rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
false,
);
- Box::new(EmitterWriter::stderr(
- emit_color,
- Some(source_map.clone()),
- None,
- fallback_bundle,
- false,
- false,
- None,
- false,
- false,
- TerminalUrl::No,
- ))
+ Box::new(EmitterWriter::stderr(emit_color, fallback_bundle).sm(Some(source_map.clone())))
};
- Handler::with_emitter(
- true,
- None,
- Box::new(SilentOnIgnoredFilesEmitter {
- has_non_ignorable_parser_errors: false,
- source_map,
- emitter,
- ignore_path_set,
- can_reset,
- }),
- )
+ Handler::with_emitter(Box::new(SilentOnIgnoredFilesEmitter {
+ has_non_ignorable_parser_errors: false,
+ source_map,
+ emitter,
+ ignore_path_set,
+ can_reset,
+ }))
}
impl ParseSess {
@@ -233,7 +218,7 @@ impl ParseSess {
}
pub(crate) fn set_silent_emitter(&mut self) {
- self.parse_sess.span_diagnostic = Handler::with_emitter(true, None, silent_emitter());
+ self.parse_sess.span_diagnostic = Handler::with_emitter(silent_emitter());
}
pub(crate) fn span_to_filename(&self, span: Span) -> FileName {
diff --git a/src/tools/rustfmt/src/test/mod.rs b/src/tools/rustfmt/src/test/mod.rs
index cfad4a8ed..37854ead2 100644
--- a/src/tools/rustfmt/src/test/mod.rs
+++ b/src/tools/rustfmt/src/test/mod.rs
@@ -838,6 +838,10 @@ fn handle_result(
// Ignore LF and CRLF difference for Windows.
if !string_eq_ignore_newline_repr(&fmt_text, &text) {
+ if std::env::var_os("RUSTC_BLESS").is_some_and(|v| v != "0") {
+ std::fs::write(file_name, fmt_text).unwrap();
+ continue;
+ }
let diff = make_diff(&text, &fmt_text, DIFF_CONTEXT_SIZE);
assert!(
!diff.is_empty(),
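
A small self-contained illustration of the bless gate introduced above: any value of `RUSTC_BLESS` other than `"0"` switches the tests from diffing to rewriting the expected files. Only the environment variable name comes from the diff; the helper is illustrative.

```rust
// Illustrative sketch of the RUSTC_BLESS check used in the rustfmt test harness change.
fn bless_requested() -> bool {
    std::env::var_os("RUSTC_BLESS").is_some_and(|v| v != "0")
}

fn main() {
    // Unset or "0" => compare against checked-in output; anything else => overwrite it.
    println!("bless requested: {}", bless_requested());
}
```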
diff --git a/src/tools/rustfmt/src/utils.rs b/src/tools/rustfmt/src/utils.rs
index 890a05b8c..4fc5a9b68 100644
--- a/src/tools/rustfmt/src/utils.rs
+++ b/src/tools/rustfmt/src/utils.rs
@@ -441,7 +441,7 @@ pub(crate) fn left_most_sub_expr(e: &ast::Expr) -> &ast::Expr {
| ast::ExprKind::Assign(ref e, _, _)
| ast::ExprKind::AssignOp(_, ref e, _)
| ast::ExprKind::Field(ref e, _)
- | ast::ExprKind::Index(ref e, _)
+ | ast::ExprKind::Index(ref e, _, _)
| ast::ExprKind::Range(Some(ref e), _, _)
| ast::ExprKind::Try(ref e) => left_most_sub_expr(e),
_ => e,
@@ -479,7 +479,7 @@ pub(crate) fn is_block_expr(context: &RewriteContext<'_>, expr: &ast::Expr, repr
| ast::ExprKind::Match(..) => repr.contains('\n'),
ast::ExprKind::Paren(ref expr)
| ast::ExprKind::Binary(_, _, ref expr)
- | ast::ExprKind::Index(_, ref expr)
+ | ast::ExprKind::Index(_, ref expr, _)
| ast::ExprKind::Unary(_, ref expr)
| ast::ExprKind::Try(ref expr)
| ast::ExprKind::Yield(Some(ref expr)) => is_block_expr(context, expr, repr),
diff --git a/src/tools/tidy/config/black.toml b/src/tools/tidy/config/black.toml
new file mode 100644
index 000000000..51a722979
--- /dev/null
+++ b/src/tools/tidy/config/black.toml
@@ -0,0 +1,15 @@
+[tool.black]
+# Ignore all submodules
+extend-exclude = """(\
+ src/doc/nomicon|\
+ src/tools/cargo/|\
+ src/doc/reference/|\
+ src/doc/book/|\
+ src/doc/rust-by-example/|\
+ library/stdarch/|\
+ src/doc/rustc-dev-guide/|\
+ src/doc/edition-guide/|\
+ src/llvm-project/|\
+ src/doc/embedded-book/|\
+ library/backtrace/
+ )"""
diff --git a/src/tools/tidy/config/requirements.in b/src/tools/tidy/config/requirements.in
new file mode 100644
index 000000000..882e09dae
--- /dev/null
+++ b/src/tools/tidy/config/requirements.in
@@ -0,0 +1,10 @@
+# requirements.in: This is the source file for our pinned version requirements
+# file "requirements.txt". To regenerate that file, pip-tools is required
+# (`python -m pip install pip-tools`). Once installed, run: `pip-compile
+# --generate-hashes src/tools/tidy/config/requirements.in`
+#
+# Note: this generation step should be run with the oldest supported python
+# version (currently 3.7) to ensure backward compatibility
+
+black==23.3.0
+ruff==0.0.272
diff --git a/src/tools/tidy/config/requirements.txt b/src/tools/tidy/config/requirements.txt
new file mode 100644
index 000000000..9fd617ad6
--- /dev/null
+++ b/src/tools/tidy/config/requirements.txt
@@ -0,0 +1,117 @@
+#
+# This file is autogenerated by pip-compile with Python 3.11
+# by the following command:
+#
+# pip-compile --generate-hashes src/tools/tidy/config/requirements.in
+#
+black==23.3.0 \
+ --hash=sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5 \
+ --hash=sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915 \
+ --hash=sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326 \
+ --hash=sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940 \
+ --hash=sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b \
+ --hash=sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30 \
+ --hash=sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c \
+ --hash=sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c \
+ --hash=sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab \
+ --hash=sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27 \
+ --hash=sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2 \
+ --hash=sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961 \
+ --hash=sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9 \
+ --hash=sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb \
+ --hash=sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70 \
+ --hash=sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331 \
+ --hash=sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2 \
+ --hash=sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266 \
+ --hash=sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d \
+ --hash=sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6 \
+ --hash=sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b \
+ --hash=sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925 \
+ --hash=sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8 \
+ --hash=sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4 \
+ --hash=sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3
+ # via -r src/tools/tidy/config/requirements.in
+click==8.1.3 \
+ --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
+ --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
+ # via black
+importlib-metadata==6.7.0 \
+ --hash=sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4 \
+ --hash=sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5
+ # via click
+mypy-extensions==1.0.0 \
+ --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \
+ --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782
+ # via black
+packaging==23.1 \
+ --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \
+ --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f
+ # via black
+pathspec==0.11.1 \
+ --hash=sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687 \
+ --hash=sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293
+ # via black
+platformdirs==3.6.0 \
+ --hash=sha256:57e28820ca8094678b807ff529196506d7a21e17156cb1cddb3e74cebce54640 \
+ --hash=sha256:ffa199e3fbab8365778c4a10e1fbf1b9cd50707de826eb304b50e57ec0cc8d38
+ # via black
+ruff==0.0.272 \
+ --hash=sha256:06b8ee4eb8711ab119db51028dd9f5384b44728c23586424fd6e241a5b9c4a3b \
+ --hash=sha256:1609b864a8d7ee75a8c07578bdea0a7db75a144404e75ef3162e0042bfdc100d \
+ --hash=sha256:19643d448f76b1eb8a764719072e9c885968971bfba872e14e7257e08bc2f2b7 \
+ --hash=sha256:273a01dc8c3c4fd4c2af7ea7a67c8d39bb09bce466e640dd170034da75d14cab \
+ --hash=sha256:27b2ea68d2aa69fff1b20b67636b1e3e22a6a39e476c880da1282c3e4bf6ee5a \
+ --hash=sha256:48eccf225615e106341a641f826b15224b8a4240b84269ead62f0afd6d7e2d95 \
+ --hash=sha256:677284430ac539bb23421a2b431b4ebc588097ef3ef918d0e0a8d8ed31fea216 \
+ --hash=sha256:691d72a00a99707a4e0b2846690961157aef7b17b6b884f6b4420a9f25cd39b5 \
+ --hash=sha256:86bc788245361a8148ff98667da938a01e1606b28a45e50ac977b09d3ad2c538 \
+ --hash=sha256:905ff8f3d6206ad56fcd70674453527b9011c8b0dc73ead27618426feff6908e \
+ --hash=sha256:9c4bfb75456a8e1efe14c52fcefb89cfb8f2a0d31ed8d804b82c6cf2dc29c42c \
+ --hash=sha256:a37ec80e238ead2969b746d7d1b6b0d31aa799498e9ba4281ab505b93e1f4b28 \
+ --hash=sha256:ae9b57546e118660175d45d264b87e9b4c19405c75b587b6e4d21e6a17bf4fdf \
+ --hash=sha256:bd2bbe337a3f84958f796c77820d55ac2db1e6753f39d1d1baed44e07f13f96d \
+ --hash=sha256:d5a208f8ef0e51d4746930589f54f9f92f84bb69a7d15b1de34ce80a7681bc00 \
+ --hash=sha256:dc406e5d756d932da95f3af082814d2467943631a587339ee65e5a4f4fbe83eb \
+ --hash=sha256:ee76b4f05fcfff37bd6ac209d1370520d509ea70b5a637bdf0a04d0c99e13dff
+ # via -r src/tools/tidy/config/requirements.in
+tomli==2.0.1 \
+ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
+ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
+ # via black
+typed-ast==1.5.4 \
+ --hash=sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2 \
+ --hash=sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1 \
+ --hash=sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6 \
+ --hash=sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62 \
+ --hash=sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac \
+ --hash=sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d \
+ --hash=sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc \
+ --hash=sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2 \
+ --hash=sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97 \
+ --hash=sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35 \
+ --hash=sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6 \
+ --hash=sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1 \
+ --hash=sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4 \
+ --hash=sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c \
+ --hash=sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e \
+ --hash=sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec \
+ --hash=sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f \
+ --hash=sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72 \
+ --hash=sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47 \
+ --hash=sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72 \
+ --hash=sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe \
+ --hash=sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6 \
+ --hash=sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3 \
+ --hash=sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66
+ # via black
+typing-extensions==4.6.3 \
+ --hash=sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26 \
+ --hash=sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5
+ # via
+ # black
+ # importlib-metadata
+ # platformdirs
+zipp==3.15.0 \
+ --hash=sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b \
+ --hash=sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556
+ # via importlib-metadata
diff --git a/src/tools/tidy/config/ruff.toml b/src/tools/tidy/config/ruff.toml
new file mode 100644
index 000000000..cf08c6264
--- /dev/null
+++ b/src/tools/tidy/config/ruff.toml
@@ -0,0 +1,41 @@
+# Configuration for ruff python linter, run as part of tidy external tools
+
+# B (bugbear), E (pycodestyle, standard), EXE (executables), F (flakes, standard)
+# ERM for error messages would be beneficial at some point
+select = ["B", "E", "EXE", "F"]
+
+ignore = [
+ "E501", # line-too-long
+ "F403", # undefined-local-with-import-star
+ "F405", # undefined-local-with-import-star-usage
+]
+
+# lowest possible for ruff
+target-version = "py37"
+
+# Ignore all submodules
+extend-exclude = [
+ "src/doc/nomicon/",
+ "src/tools/cargo/",
+ "src/doc/reference/",
+ "src/doc/book/",
+ "src/doc/rust-by-example/",
+ "library/stdarch/",
+ "src/doc/rustc-dev-guide/",
+ "src/doc/edition-guide/",
+ "src/llvm-project/",
+ "src/doc/embedded-book/",
+ "library/backtrace/",
+ # Hack: CI runs from a subdirectory under the main checkout
+ "../src/doc/nomicon/",
+ "../src/tools/cargo/",
+ "../src/doc/reference/",
+ "../src/doc/book/",
+ "../src/doc/rust-by-example/",
+ "../library/stdarch/",
+ "../src/doc/rustc-dev-guide/",
+ "../src/doc/edition-guide/",
+ "../src/llvm-project/",
+ "../src/doc/embedded-book/",
+ "../library/backtrace/",
+]
diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs
index ecc84c161..410852b6a 100644
--- a/src/tools/tidy/src/deps.rs
+++ b/src/tools/tidy/src/deps.rs
@@ -40,10 +40,13 @@ const EXCEPTIONS: &[(&str, &str)] = &[
("ar_archive_writer", "Apache-2.0 WITH LLVM-exception"), // rustc
("colored", "MPL-2.0"), // rustfmt
("dissimilar", "Apache-2.0"), // rustdoc, rustc_lexer (few tests) via expect-test, (dev deps)
+ ("encoding_rs", "(Apache-2.0 OR MIT) AND BSD-3-Clause"), // opt-dist
("fluent-langneg", "Apache-2.0"), // rustc (fluent translations)
("fortanix-sgx-abi", "MPL-2.0"), // libstd but only for `sgx` target. FIXME: this dependency violates the documentation comment above.
("instant", "BSD-3-Clause"), // rustc_driver/tracing-subscriber/parking_lot
("mdbook", "MPL-2.0"), // mdbook
+ ("openssl", "Apache-2.0"), // opt-dist
+ ("rustc_apfloat", "Apache-2.0 WITH LLVM-exception"), // rustc (license is the same as LLVM uses)
("ryu", "Apache-2.0 OR BSL-1.0"), // cargo/... (because of serde)
("self_cell", "Apache-2.0"), // rustc (fluent translations)
("snap", "BSD-3-Clause"), // rustc
@@ -54,6 +57,9 @@ const EXCEPTIONS_CARGO: &[(&str, &str)] = &[
// tidy-alphabetical-start
("bitmaps", "MPL-2.0+"),
("bytesize", "Apache-2.0"),
+ ("ciborium", "Apache-2.0"),
+ ("ciborium-io", "Apache-2.0"),
+ ("ciborium-ll", "Apache-2.0"),
("dunce", "CC0-1.0 OR MIT-0 OR Apache-2.0"),
("fiat-crypto", "MIT OR Apache-2.0 OR BSD-1-Clause"),
("im-rc", "MPL-2.0+"),
@@ -107,7 +113,6 @@ const PERMITTED_DEPS_LOCATION: &str = concat!(file!(), ":", line!());
/// rustc. Please check with the compiler team before adding an entry.
const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
// tidy-alphabetical-start
- "addr2line",
"adler",
"ahash",
"aho-corasick",
@@ -132,8 +137,12 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"crossbeam-utils",
"crypto-common",
"cstr",
+ "darling",
+ "darling_core",
+ "darling_macro",
"datafrog",
"derive_more",
+ "derive_setters",
"digest",
"displaydoc",
"dissimilar",
@@ -142,6 +151,8 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"elsa",
"ena",
"equivalent",
+ "errno",
+ "errno-dragonfly",
"expect-test",
"fallible-iterator", // dependency of `thorin`
"fastrand",
@@ -150,6 +161,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"fluent-bundle",
"fluent-langneg",
"fluent-syntax",
+ "fnv",
"fortanix-sgx-abi",
"generic-array",
"getopts",
@@ -163,6 +175,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"icu_provider",
"icu_provider_adapters",
"icu_provider_macros",
+ "ident_case",
"indexmap",
"instant",
"intl-memoizer",
@@ -217,6 +230,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"rustc-hash",
"rustc-rayon",
"rustc-rayon-core",
+ "rustc_apfloat",
"rustc_version",
"rustix",
"ruzstd", // via object in thorin-dwp
@@ -236,6 +250,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"stable_deref_trait",
"stacker",
"static_assertions",
+ "strsim",
"syn",
"synstructure",
"tempfile",
@@ -243,9 +258,14 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"termize",
"thin-vec",
"thiserror",
+ "thiserror-core",
+ "thiserror-core-impl",
"thiserror-impl",
"thorin-dwp",
"thread_local",
+ "time",
+ "time-core",
+ "time-macros",
"tinystr",
"tinyvec",
"tinyvec_macros",
@@ -258,18 +278,14 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"twox-hash",
"type-map",
"typenum",
- "unic-char-property",
- "unic-char-range",
- "unic-common",
- "unic-emoji-char",
"unic-langid",
"unic-langid-impl",
"unic-langid-macros",
"unic-langid-macros-impl",
- "unic-ucd-version",
"unicase",
"unicode-ident",
"unicode-normalization",
+ "unicode-properties",
"unicode-script",
"unicode-security",
"unicode-width",
@@ -324,6 +340,7 @@ const PERMITTED_CRANELIFT_DEPENDENCIES: &[&str] = &[
"cranelift-native",
"cranelift-object",
"crc32fast",
+ "equivalent",
"fallible-iterator",
"gimli",
"hashbrown",
diff --git a/src/tools/tidy/src/ext_tool_checks.rs b/src/tools/tidy/src/ext_tool_checks.rs
new file mode 100644
index 000000000..40e75d1d3
--- /dev/null
+++ b/src/tools/tidy/src/ext_tool_checks.rs
@@ -0,0 +1,435 @@
+//! Optional checks for file types other than Rust source
+//!
+//! Handles python tool version management via a virtual environment in
+//! `build/venv`.
+//!
+//! # Functional outline
+//!
+//! 1. Run tidy with an extra option: `--extra-checks=py,shell`,
+//! `--extra-checks=py:lint`, or similar. Optionally provide specific
+//! configuration after a double dash (`--extra-checks=py -- foo.py`)
+//! 2. Build configuration based on args/environment:
+//! - Formatters by default are in check only mode
+//! - If in CI (TIDY_PRINT_DIFF=1 is set), check and print the diff
+//! - If `--bless` is provided, formatters may run
+//! - Pass any additional config after the `--`. If no files are specified,
+//! use a default.
+//! 3. Print the output of the given command. If it fails and `TIDY_PRINT_DIFF`
+//! is set, rerun the tool to print a suggestion diff (e.g. for CI)
+
+use std::ffi::OsStr;
+use std::fmt;
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+/// Minimum python revision is 3.7 for ruff
+const MIN_PY_REV: (u32, u32) = (3, 7);
+const MIN_PY_REV_STR: &str = "≥3.7";
+
+/// Path to find the python executable within a virtual environment
+#[cfg(target_os = "windows")]
+const REL_PY_PATH: &[&str] = &["Scripts", "python3.exe"];
+#[cfg(not(target_os = "windows"))]
+const REL_PY_PATH: &[&str] = &["bin", "python3"];
+
+const RUFF_CONFIG_PATH: &[&str] = &["src", "tools", "tidy", "config", "ruff.toml"];
+const BLACK_CONFIG_PATH: &[&str] = &["src", "tools", "tidy", "config", "black.toml"];
+/// Location within build directory
+const RUFF_CACH_PATH: &[&str] = &["cache", "ruff_cache"];
+const PIP_REQ_PATH: &[&str] = &["src", "tools", "tidy", "config", "requirements.txt"];
+
+pub fn check(
+ root_path: &Path,
+ outdir: &Path,
+ bless: bool,
+ extra_checks: Option<&str>,
+ pos_args: &[String],
+ bad: &mut bool,
+) {
+ if let Err(e) = check_impl(root_path, outdir, bless, extra_checks, pos_args) {
+ tidy_error!(bad, "{e}");
+ }
+}
+
+fn check_impl(
+ root_path: &Path,
+ outdir: &Path,
+ bless: bool,
+ extra_checks: Option<&str>,
+ pos_args: &[String],
+) -> Result<(), Error> {
+ let show_diff = std::env::var("TIDY_PRINT_DIFF")
+ .map_or(false, |v| v.eq_ignore_ascii_case("true") || v == "1");
+
+ // Split comma-separated args up
+ let lint_args = match extra_checks {
+ Some(s) => s.strip_prefix("--extra-checks=").unwrap().split(',').collect(),
+ None => vec![],
+ };
+
+ let python_all = lint_args.contains(&"py");
+ let python_lint = lint_args.contains(&"py:lint") || python_all;
+ let python_fmt = lint_args.contains(&"py:fmt") || python_all;
+ let shell_all = lint_args.contains(&"shell");
+ let shell_lint = lint_args.contains(&"shell:lint") || shell_all;
+
+ let mut py_path = None;
+
+ let (cfg_args, file_args): (Vec<_>, Vec<_>) = pos_args
+ .into_iter()
+ .map(OsStr::new)
+ .partition(|arg| arg.to_str().is_some_and(|s| s.starts_with("-")));
+
+ if python_lint || python_fmt {
+ let venv_path = outdir.join("venv");
+ let mut reqs_path = root_path.to_owned();
+ reqs_path.extend(PIP_REQ_PATH);
+ py_path = Some(get_or_create_venv(&venv_path, &reqs_path)?);
+ }
+
+ if python_lint {
+ eprintln!("linting python files");
+ let mut cfg_args_ruff = cfg_args.clone();
+ let mut file_args_ruff = file_args.clone();
+
+ let mut cfg_path = root_path.to_owned();
+ cfg_path.extend(RUFF_CONFIG_PATH);
+ let mut cache_dir = outdir.to_owned();
+ cache_dir.extend(RUFF_CACH_PATH);
+
+ cfg_args_ruff.extend([
+ "--config".as_ref(),
+ cfg_path.as_os_str(),
+ "--cache-dir".as_ref(),
+ cache_dir.as_os_str(),
+ ]);
+
+ if file_args_ruff.is_empty() {
+ file_args_ruff.push(root_path.as_os_str());
+ }
+
+ let mut args = merge_args(&cfg_args_ruff, &file_args_ruff);
+ let res = py_runner(py_path.as_ref().unwrap(), "ruff", &args);
+
+ if res.is_err() && show_diff {
+ eprintln!("\npython linting failed! Printing diff suggestions:");
+
+ args.insert(0, "--diff".as_ref());
+ let _ = py_runner(py_path.as_ref().unwrap(), "ruff", &args);
+ }
+ // Rethrow error
+ let _ = res?;
+ }
+
+ if python_fmt {
+ let mut cfg_args_black = cfg_args.clone();
+ let mut file_args_black = file_args.clone();
+
+ if bless {
+ eprintln!("formatting python files");
+ } else {
+ eprintln!("checking python file formatting");
+ cfg_args_black.push("--check".as_ref());
+ }
+
+ let mut cfg_path = root_path.to_owned();
+ cfg_path.extend(BLACK_CONFIG_PATH);
+
+ cfg_args_black.extend(["--config".as_ref(), cfg_path.as_os_str()]);
+
+ if file_args_black.is_empty() {
+ file_args_black.push(root_path.as_os_str());
+ }
+
+ let mut args = merge_args(&cfg_args_black, &file_args_black);
+ let res = py_runner(py_path.as_ref().unwrap(), "black", &args);
+
+ if res.is_err() && show_diff {
+ eprintln!("\npython formatting does not match! Printing diff:");
+
+ args.insert(0, "--diff".as_ref());
+ let _ = py_runner(py_path.as_ref().unwrap(), "black", &args);
+ }
+ // Rethrow error
+ let _ = res?;
+ }
+
+ if shell_lint {
+ eprintln!("linting shell files");
+
+ let mut file_args_shc = file_args.clone();
+ let files;
+ if file_args_shc.is_empty() {
+ files = find_with_extension(root_path, "sh")?;
+ file_args_shc.extend(files.iter().map(|p| p.as_os_str()));
+ }
+
+ shellcheck_runner(&merge_args(&cfg_args, &file_args_shc))?;
+ }
+
+ Ok(())
+}
+
+/// Helper to create `cfg1 cfg2 -- file1 file2` output
+fn merge_args<'a>(cfg_args: &[&'a OsStr], file_args: &[&'a OsStr]) -> Vec<&'a OsStr> {
+ let mut args = cfg_args.to_owned();
+ args.push("--".as_ref());
+ args.extend(file_args);
+ args
+}
+
+/// Run a python command with given arguments. `py_path` should be a virtualenv.
+fn py_runner(py_path: &Path, bin: &'static str, args: &[&OsStr]) -> Result<(), Error> {
+ let status = Command::new(py_path).arg("-m").arg(bin).args(args).status()?;
+ if status.success() { Ok(()) } else { Err(Error::FailedCheck(bin)) }
+}
+
+/// Create a virtualenv at a given path if it doesn't already exist, or validate
+/// the install if it does. Returns the path to that venv's python executable.
+fn get_or_create_venv(venv_path: &Path, src_reqs_path: &Path) -> Result<PathBuf, Error> {
+ let mut should_create = true;
+ let dst_reqs_path = venv_path.join("requirements.txt");
+ let mut py_path = venv_path.to_owned();
+ py_path.extend(REL_PY_PATH);
+
+ if let Ok(req) = fs::read_to_string(&dst_reqs_path) {
+ if req == fs::read_to_string(src_reqs_path)? {
+ // found existing environment
+ should_create = false;
+ } else {
+ eprintln!("requirements.txt file mismatch, recreating environment");
+ }
+ }
+
+ if should_create {
+ eprintln!("removing old virtual environment");
+ if venv_path.is_dir() {
+ fs::remove_dir_all(venv_path).unwrap_or_else(|_| {
+ panic!("failed to remove directory at {}", venv_path.display())
+ });
+ }
+ create_venv_at_path(venv_path)?;
+ install_requirements(&py_path, src_reqs_path, &dst_reqs_path)?;
+ }
+
+ verify_py_version(&py_path)?;
+ Ok(py_path)
+}
+
+/// Attempt to create a virtualenv at this path. Cycles through all expected
+/// valid python versions to find one that is installed.
+fn create_venv_at_path(path: &Path) -> Result<(), Error> {
+ /// Preferred python versions in order. Newest to oldest then current
+ /// development versions
+ const TRY_PY: &[&str] = &[
+ "python3.11",
+ "python3.10",
+ "python3.9",
+ "python3.8",
+ "python3.7",
+ "python3",
+ "python",
+ "python3.12",
+ "python3.13",
+ ];
+
+ let mut sys_py = None;
+ let mut found = Vec::new();
+
+ for py in TRY_PY {
+ match verify_py_version(Path::new(py)) {
+ Ok(_) => {
+ sys_py = Some(*py);
+ break;
+ }
+ // Skip not found errors
+ Err(Error::Io(e)) if e.kind() == io::ErrorKind::NotFound => (),
+ // Skip insufficient version errors
+ Err(Error::Version { installed, .. }) => found.push(installed),
+ // just log and skip unrecognized errors
+ Err(e) => eprintln!("note: error running '{py}': {e}"),
+ }
+ }
+
+ let Some(sys_py) = sys_py else {
+ let ret = if found.is_empty() {
+ Error::MissingReq("python3", "python file checks", None)
+ } else {
+ found.sort();
+ found.dedup();
+ Error::Version {
+ program: "python3",
+ required: MIN_PY_REV_STR,
+ installed: found.join(", "),
+ }
+ };
+ return Err(ret);
+ };
+
+ eprintln!("creating virtual environment at '{}' using '{sys_py}'", path.display());
+ let out = Command::new(sys_py).args(["-m", "virtualenv"]).arg(path).output().unwrap();
+
+ if out.status.success() {
+ return Ok(());
+ }
+ let err = if String::from_utf8_lossy(&out.stderr).contains("No module named virtualenv") {
+ Error::Generic(format!(
+ "virtualenv not found: you may need to install it \
+ (`python3 -m pip install virtualenv`)"
+ ))
+ } else {
+ Error::Generic(format!("failed to create venv at '{}' using {sys_py}", path.display()))
+ };
+ Err(err)
+}
+
+/// Parse python's version output (`Python x.y.z`) and ensure we have a
+/// suitable version.
+fn verify_py_version(py_path: &Path) -> Result<(), Error> {
+ let out = Command::new(py_path).arg("--version").output()?;
+ let outstr = String::from_utf8_lossy(&out.stdout);
+ let vers = outstr.trim().split_ascii_whitespace().nth(1).unwrap().trim();
+ let mut vers_comps = vers.split('.');
+ let major: u32 = vers_comps.next().unwrap().parse().unwrap();
+ let minor: u32 = vers_comps.next().unwrap().parse().unwrap();
+
+ if (major, minor) < MIN_PY_REV {
+ Err(Error::Version {
+ program: "python",
+ required: MIN_PY_REV_STR,
+ installed: vers.to_owned(),
+ })
+ } else {
+ Ok(())
+ }
+}
+
+fn install_requirements(
+ py_path: &Path,
+ src_reqs_path: &Path,
+ dst_reqs_path: &Path,
+) -> Result<(), Error> {
+ let stat = Command::new(py_path)
+ .args(["-m", "pip", "install", "--upgrade", "pip"])
+ .status()
+ .expect("failed to launch pip");
+ if !stat.success() {
+ return Err(Error::Generic(format!("pip install failed with status {stat}")));
+ }
+
+ let stat = Command::new(py_path)
+ .args(["-m", "pip", "install", "--require-hashes", "-r"])
+ .arg(src_reqs_path)
+ .status()?;
+ if !stat.success() {
+ return Err(Error::Generic(format!(
+ "failed to install requirements at {}",
+ src_reqs_path.display()
+ )));
+ }
+ fs::copy(src_reqs_path, dst_reqs_path)?;
+ assert_eq!(
+ fs::read_to_string(src_reqs_path).unwrap(),
+ fs::read_to_string(dst_reqs_path).unwrap()
+ );
+ Ok(())
+}
+
+/// Check that shellcheck is installed then run it at the given path
+fn shellcheck_runner(args: &[&OsStr]) -> Result<(), Error> {
+ match Command::new("shellcheck").arg("--version").status() {
+ Ok(_) => (),
+ Err(e) if e.kind() == io::ErrorKind::NotFound => {
+ return Err(Error::MissingReq(
+ "shellcheck",
+ "shell file checks",
+ Some(
+ "see <https://github.com/koalaman/shellcheck#installing> \
+ for installation instructions"
+ .to_owned(),
+ ),
+ ));
+ }
+ Err(e) => return Err(e.into()),
+ }
+
+ let status = Command::new("shellcheck").args(args).status()?;
+ if status.success() { Ok(()) } else { Err(Error::FailedCheck("shellcheck")) }
+}
+
+/// Check git for tracked files matching an extension
+fn find_with_extension(root_path: &Path, extension: &str) -> Result<Vec<PathBuf>, Error> {
+// Untracked files show up in short status output and are indicated with a leading `?`
+ // -C changes git to be as if run from that directory
+ let stat_output =
+ Command::new("git").arg("-C").arg(root_path).args(["status", "--short"]).output()?.stdout;
+
+ if String::from_utf8_lossy(&stat_output).lines().filter(|ln| ln.starts_with('?')).count() > 0 {
+ eprintln!("found untracked files, ignoring");
+ }
+
+ let mut output = Vec::new();
+ let binding = Command::new("git").arg("-C").arg(root_path).args(["ls-files"]).output()?;
+ let tracked = String::from_utf8_lossy(&binding.stdout);
+
+ for line in tracked.lines() {
+ let line = line.trim();
+ let path = Path::new(line);
+
+ if path.extension() == Some(OsStr::new(extension)) {
+ output.push(path.to_owned());
+ }
+ }
+
+ Ok(output)
+}
+
+#[derive(Debug)]
+enum Error {
+ Io(io::Error),
+ /// a is required to run b. c is extra info
+ MissingReq(&'static str, &'static str, Option<String>),
+ /// Tool x failed the check
+ FailedCheck(&'static str),
+ /// Any message, just print it
+ Generic(String),
+ /// Installed but wrong version
+ Version {
+ program: &'static str,
+ required: &'static str,
+ installed: String,
+ },
+}
+
+impl fmt::Display for Error {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::MissingReq(a, b, ex) => {
+ write!(
+ f,
+ "{a} is required to run {b} but it could not be located. Is it installed?"
+ )?;
+ if let Some(s) = ex {
+ write!(f, "\n{s}")?;
+ };
+ Ok(())
+ }
+ Self::Version { program, required, installed } => write!(
+ f,
+ "insufficient version of '{program}' to run external tools: \
+ {required} required but found {installed}",
+ ),
+ Self::Generic(s) => f.write_str(s),
+ Self::Io(e) => write!(f, "IO error: {e}"),
+ Self::FailedCheck(s) => write!(f, "checks with external tool '{s}' failed"),
+ }
+ }
+}
+
+impl From<io::Error> for Error {
+ fn from(value: io::Error) -> Self {
+ Self::Io(value)
+ }
+}
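
To make the version gate concrete, here is a runnable sketch of the `python --version` parsing performed by `verify_py_version`, under the assumption the output looks like `Python 3.11.4`; the parsing steps mirror the new file, the rest is illustrative.

```rust
// Illustrative only: parse "Python X.Y.Z" and compare against the 3.7 minimum.
const MIN_PY_REV: (u32, u32) = (3, 7);

fn parse_py_version(output: &str) -> (u32, u32) {
    let vers = output.trim().split_ascii_whitespace().nth(1).unwrap().trim();
    let mut comps = vers.split('.');
    let major: u32 = comps.next().unwrap().parse().unwrap();
    let minor: u32 = comps.next().unwrap().parse().unwrap();
    (major, minor)
}

fn main() {
    assert!(parse_py_version("Python 3.11.4") >= MIN_PY_REV);
    assert!(parse_py_version("Python 3.6.9") < MIN_PY_REV);
}
```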
diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs
index 7ad8f5c5c..d900c04c1 100644
--- a/src/tools/tidy/src/features.rs
+++ b/src/tools/tidy/src/features.rs
@@ -338,6 +338,7 @@ fn collect_lang_features_in(features: &mut Features, base: &Path, file: &str, ba
let level = match parts.next().map(|l| l.trim().trim_start_matches('(')) {
Some("active") => Status::Unstable,
Some("incomplete") => Status::Unstable,
+ Some("internal") => Status::Unstable,
Some("removed") => Status::Removed,
Some("accepted") => Status::Stable,
_ => continue,
diff --git a/src/tools/tidy/src/fluent_alphabetical.rs b/src/tools/tidy/src/fluent_alphabetical.rs
index 5f8eaebf5..67b745373 100644
--- a/src/tools/tidy/src/fluent_alphabetical.rs
+++ b/src/tools/tidy/src/fluent_alphabetical.rs
@@ -23,7 +23,7 @@ fn check_alphabetic(filename: &str, fluent: &str, bad: &mut bool) {
tidy_error!(
bad,
"{filename}: message `{}` appears before `{}`, but is alphabetically later than it
-run tidy with `--bless` to sort the file correctly",
+run `./x.py test tidy --bless` to sort the file correctly",
name.as_str(),
next.as_str()
);
diff --git a/src/tools/tidy/src/lib.rs b/src/tools/tidy/src/lib.rs
index e467514a7..9b19b8eec 100644
--- a/src/tools/tidy/src/lib.rs
+++ b/src/tools/tidy/src/lib.rs
@@ -57,6 +57,7 @@ pub mod debug_artifacts;
pub mod deps;
pub mod edition;
pub mod error_codes;
+pub mod ext_tool_checks;
pub mod extdeps;
pub mod features;
pub mod fluent_alphabetical;
diff --git a/src/tools/tidy/src/main.rs b/src/tools/tidy/src/main.rs
index e21068490..5fa91715a 100644
--- a/src/tools/tidy/src/main.rs
+++ b/src/tools/tidy/src/main.rs
@@ -37,9 +37,14 @@ fn main() {
let librustdoc_path = src_path.join("librustdoc");
let args: Vec<String> = env::args().skip(1).collect();
-
- let verbose = args.iter().any(|s| *s == "--verbose");
- let bless = args.iter().any(|s| *s == "--bless");
+ let (cfg_args, pos_args) = match args.iter().position(|arg| arg == "--") {
+ Some(pos) => (&args[..pos], &args[pos + 1..]),
+ None => (&args[..], [].as_slice()),
+ };
+ let verbose = cfg_args.iter().any(|s| *s == "--verbose");
+ let bless = cfg_args.iter().any(|s| *s == "--bless");
+ let extra_checks =
+ cfg_args.iter().find(|s| s.starts_with("--extra-checks=")).map(String::as_str);
let bad = std::sync::Arc::new(AtomicBool::new(false));
@@ -150,6 +155,8 @@ fn main() {
r
};
check!(unstable_book, &src_path, collected);
+
+ check!(ext_tool_checks, &root_path, &output_directory, bless, extra_checks, pos_args);
});
if bad.load(Ordering::Relaxed) {
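
A standalone sketch of the argument split introduced above: everything before a literal `--` stays a tidy flag, everything after it is forwarded to the external tool checks. The example values are hypothetical.

```rust
// Illustrative only: mirrors the `--` split added to tidy's main.rs.
fn split_args(args: &[String]) -> (&[String], &[String]) {
    match args.iter().position(|arg| arg == "--") {
        Some(pos) => (&args[..pos], &args[pos + 1..]),
        None => (&args[..], &args[args.len()..]),
    }
}

fn main() {
    let args: Vec<String> =
        ["--bless", "--extra-checks=py", "--", "foo.py"].iter().map(|s| s.to_string()).collect();
    let (cfg_args, pos_args) = split_args(&args);
    assert_eq!(cfg_args, &["--bless".to_string(), "--extra-checks=py".to_string()][..]);
    assert_eq!(pos_args, &["foo.py".to_string()][..]);
}
```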
diff --git a/src/tools/tidy/src/pal.rs b/src/tools/tidy/src/pal.rs
index 6fd41e833..3a4d9c53d 100644
--- a/src/tools/tidy/src/pal.rs
+++ b/src/tools/tidy/src/pal.rs
@@ -39,7 +39,6 @@ const EXCEPTION_PATHS: &[&str] = &[
"library/panic_unwind",
"library/unwind",
"library/rtstartup", // Not sure what to do about this. magic stuff for mingw
- "library/term", // Not sure how to make this crate portable, but test crate needs it.
"library/test", // Probably should defer to unstable `std::sys` APIs.
// The `VaList` implementation must have platform specific code.
// The Windows implementation of a `va_list` is always a character
@@ -53,13 +52,10 @@ const EXCEPTION_PATHS: &[&str] = &[
// FIXME: platform-specific code should be moved to `sys`
"library/std/src/io/copy.rs",
"library/std/src/io/stdio.rs",
- "library/std/src/f32.rs",
- "library/std/src/f64.rs",
+ "library/std/src/lib.rs", // for miniz_oxide leaking docs, which itself workaround
"library/std/src/path.rs",
"library/std/src/sys_common", // Should only contain abstractions over platforms
"library/std/src/net/test.rs", // Utility helpers for tests
- "library/std/src/personality.rs",
- "library/std/src/personality/",
];
pub fn check(path: &Path, bad: &mut bool) {
diff --git a/src/tools/tidy/src/style.rs b/src/tools/tidy/src/style.rs
index d0257d716..11480e2be 100644
--- a/src/tools/tidy/src/style.rs
+++ b/src/tools/tidy/src/style.rs
@@ -302,10 +302,6 @@ pub fn check(path: &Path, bad: &mut bool) {
return;
}
}
- // apfloat shouldn't be changed because of license problems
- if is_in(file, "compiler", "rustc_apfloat") {
- return;
- }
let mut skip_cr = contains_ignore_directive(can_contain, &contents, "cr");
let mut skip_undocumented_unsafe =
contains_ignore_directive(can_contain, &contents, "undocumented-unsafe");
diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs
index 55bf38110..341492400 100644
--- a/src/tools/tidy/src/ui_tests.rs
+++ b/src/tools/tidy/src/ui_tests.rs
@@ -10,8 +10,8 @@ use std::path::{Path, PathBuf};
const ENTRY_LIMIT: usize = 900;
// FIXME: The following limits should be reduced eventually.
-const ISSUES_ENTRY_LIMIT: usize = 1896;
-const ROOT_ENTRY_LIMIT: usize = 870;
+const ISSUES_ENTRY_LIMIT: usize = 1891;
+const ROOT_ENTRY_LIMIT: usize = 866;
const EXPECTED_TEST_FILE_EXTENSIONS: &[&str] = &[
"rs", // test source files
@@ -100,7 +100,7 @@ pub fn check(path: &Path, bad: &mut bool) {
{
tidy_error!(bad, "file {} has unexpected extension {}", file_path.display(), ext);
}
- if ext == "stderr" || ext == "stdout" {
+ if ext == "stderr" || ext == "stdout" || ext == "fixed" {
// Test output filenames have one of the formats:
// ```
// $testname.stderr
@@ -116,7 +116,9 @@ pub fn check(path: &Path, bad: &mut bool) {
// must strip all of them.
let testname =
file_path.file_name().unwrap().to_str().unwrap().split_once('.').unwrap().0;
- if !file_path.with_file_name(testname).with_extension("rs").exists() {
+ if !file_path.with_file_name(testname).with_extension("rs").exists()
+ && !testname.contains("ignore-tidy")
+ {
tidy_error!(bad, "Stray file with UI testing output: {:?}", file_path);
}